Importing all the required modules:
import warnings
warnings.filterwarnings('ignore')
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import plotly.express as px
import missingno as msno
#from ydata_profiling import ProfileReport
from sklearn import set_config
from sklearn.pipeline import Pipeline
from sklearn.impute import SimpleImputer
from sklearn.preprocessing import StandardScaler, OneHotEncoder, LabelEncoder
from sklearn.compose import ColumnTransformer, make_column_selector
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import GridSearchCV, train_test_split
from sklearn.feature_selection import SelectKBest, f_classif, RFE
from sklearn.decomposition import PCA
from sklearn.cluster import DBSCAN, KMeans
from sklearn.metrics import silhouette_score, accuracy_score, confusion_matrix, precision_score, recall_score, f1_score, roc_curve, auc
from pyclustering.cluster.clarans import clarans;
from pyclustering.utils import timedcall;
import tensorflow as tf
from keras.models import Model
from keras.layers import Dense, concatenate
import xgboost as xgb
from xgboost import XGBClassifier
WARNING:tensorflow:From C:\Anaconda\Lib\site-packages\keras\src\losses.py:2976: The name tf.losses.sparse_softmax_cross_entropy is deprecated. Please use tf.compat.v1.losses.sparse_softmax_cross_entropy instead.
Loading the Dataset:
telecom = pd.read_csv('telecom_customer_churn.csv')
telecom.head(3)
| Customer ID | Gender | Age | Married | Number of Dependents | City | Zip Code | Latitude | Longitude | Number of Referrals | ... | Payment Method | Monthly Charge | Total Charges | Total Refunds | Total Extra Data Charges | Total Long Distance Charges | Total Revenue | Customer Status | Churn Category | Churn Reason | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0002-ORFBO | Female | 37 | Yes | 0 | Frazier Park | 93225 | 34.827662 | -118.999073 | 2 | ... | Credit Card | 65.6 | 593.30 | 0.00 | 0 | 381.51 | 974.81 | Stayed | NaN | NaN |
| 1 | 0003-MKNFE | Male | 46 | No | 0 | Glendale | 91206 | 34.162515 | -118.203869 | 0 | ... | Credit Card | -4.0 | 542.40 | 38.33 | 10 | 96.21 | 610.28 | Stayed | NaN | NaN |
| 2 | 0004-TLHLJ | Male | 50 | No | 0 | Costa Mesa | 92627 | 33.645672 | -117.922613 | 0 | ... | Bank Withdrawal | 73.9 | 280.85 | 0.00 | 0 | 134.60 | 415.45 | Churned | Competitor | Competitor had better devices |
3 rows × 38 columns
originalData = telecom
originalData.head()
| Customer ID | Gender | Age | Married | Number of Dependents | City | Zip Code | Latitude | Longitude | Number of Referrals | ... | Payment Method | Monthly Charge | Total Charges | Total Refunds | Total Extra Data Charges | Total Long Distance Charges | Total Revenue | Customer Status | Churn Category | Churn Reason | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0002-ORFBO | Female | 37 | Yes | 0 | Frazier Park | 93225 | 34.827662 | -118.999073 | 2 | ... | Credit Card | 65.6 | 593.30 | 0.00 | 0 | 381.51 | 974.81 | Stayed | NaN | NaN |
| 1 | 0003-MKNFE | Male | 46 | No | 0 | Glendale | 91206 | 34.162515 | -118.203869 | 0 | ... | Credit Card | -4.0 | 542.40 | 38.33 | 10 | 96.21 | 610.28 | Stayed | NaN | NaN |
| 2 | 0004-TLHLJ | Male | 50 | No | 0 | Costa Mesa | 92627 | 33.645672 | -117.922613 | 0 | ... | Bank Withdrawal | 73.9 | 280.85 | 0.00 | 0 | 134.60 | 415.45 | Churned | Competitor | Competitor had better devices |
| 3 | 0011-IGKFF | Male | 78 | Yes | 0 | Martinez | 94553 | 38.014457 | -122.115432 | 1 | ... | Bank Withdrawal | 98.0 | 1237.85 | 0.00 | 0 | 361.66 | 1599.51 | Churned | Dissatisfaction | Product dissatisfaction |
| 4 | 0013-EXCHZ | Female | 75 | Yes | 0 | Camarillo | 93010 | 34.227846 | -119.079903 | 3 | ... | Credit Card | 83.9 | 267.40 | 0.00 | 0 | 22.14 | 289.54 | Churned | Dissatisfaction | Network reliability |
5 rows × 38 columns
telecom.info()
<class 'pandas.core.frame.DataFrame'> RangeIndex: 7043 entries, 0 to 7042 Data columns (total 38 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 Customer ID 7043 non-null object 1 Gender 7043 non-null object 2 Age 7043 non-null int64 3 Married 7043 non-null object 4 Number of Dependents 7043 non-null int64 5 City 7043 non-null object 6 Zip Code 7043 non-null int64 7 Latitude 7043 non-null float64 8 Longitude 7043 non-null float64 9 Number of Referrals 7043 non-null int64 10 Tenure in Months 7043 non-null int64 11 Offer 7043 non-null object 12 Phone Service 7043 non-null object 13 Avg Monthly Long Distance Charges 6361 non-null float64 14 Multiple Lines 6361 non-null object 15 Internet Service 7043 non-null object 16 Internet Type 5517 non-null object 17 Avg Monthly GB Download 5517 non-null float64 18 Online Security 5517 non-null object 19 Online Backup 5517 non-null object 20 Device Protection Plan 5517 non-null object 21 Premium Tech Support 5517 non-null object 22 Streaming TV 5517 non-null object 23 Streaming Movies 5517 non-null object 24 Streaming Music 5517 non-null object 25 Unlimited Data 5517 non-null object 26 Contract 7043 non-null object 27 Paperless Billing 7043 non-null object 28 Payment Method 7043 non-null object 29 Monthly Charge 7043 non-null float64 30 Total Charges 7043 non-null float64 31 Total Refunds 7043 non-null float64 32 Total Extra Data Charges 7043 non-null int64 33 Total Long Distance Charges 7043 non-null float64 34 Total Revenue 7043 non-null float64 35 Customer Status 7043 non-null object 36 Churn Category 1869 non-null object 37 Churn Reason 1869 non-null object dtypes: float64(9), int64(6), object(23) memory usage: 2.0+ MB
Exploratory Data Analysis (EDA):
telecom.hist(figsize=(15,15), xrot=0);
telecom.describe()
| Age | Number of Dependents | Zip Code | Latitude | Longitude | Number of Referrals | Tenure in Months | Avg Monthly Long Distance Charges | Avg Monthly GB Download | Monthly Charge | Total Charges | Total Refunds | Total Extra Data Charges | Total Long Distance Charges | Total Revenue | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| count | 7043.000000 | 7043.000000 | 7043.000000 | 7043.000000 | 7043.000000 | 7043.000000 | 7043.000000 | 6361.000000 | 5517.000000 | 7043.000000 | 7043.000000 | 7043.000000 | 7043.000000 | 7043.000000 | 7043.000000 |
| mean | 46.509726 | 0.468692 | 93486.070567 | 36.197455 | -119.756684 | 1.951867 | 32.386767 | 25.420517 | 26.189958 | 63.596131 | 2280.381264 | 1.962182 | 6.860713 | 749.099262 | 3034.379056 |
| std | 16.750352 | 0.962802 | 1856.767505 | 2.468929 | 2.154425 | 3.001199 | 24.542061 | 14.200374 | 19.586585 | 31.204743 | 2266.220462 | 7.902614 | 25.104978 | 846.660055 | 2865.204542 |
| min | 19.000000 | 0.000000 | 90001.000000 | 32.555828 | -124.301372 | 0.000000 | 1.000000 | 1.010000 | 2.000000 | -10.000000 | 18.800000 | 0.000000 | 0.000000 | 0.000000 | 21.360000 |
| 25% | 32.000000 | 0.000000 | 92101.000000 | 33.990646 | -121.788090 | 0.000000 | 9.000000 | 13.050000 | 13.000000 | 30.400000 | 400.150000 | 0.000000 | 0.000000 | 70.545000 | 605.610000 |
| 50% | 46.000000 | 0.000000 | 93518.000000 | 36.205465 | -119.595293 | 0.000000 | 29.000000 | 25.690000 | 21.000000 | 70.050000 | 1394.550000 | 0.000000 | 0.000000 | 401.440000 | 2108.640000 |
| 75% | 60.000000 | 0.000000 | 95329.000000 | 38.161321 | -117.969795 | 3.000000 | 55.000000 | 37.680000 | 30.000000 | 89.750000 | 3786.600000 | 0.000000 | 0.000000 | 1191.100000 | 4801.145000 |
| max | 80.000000 | 9.000000 | 96150.000000 | 41.962127 | -114.192901 | 11.000000 | 72.000000 | 49.990000 | 85.000000 | 118.750000 | 8684.800000 | 49.790000 | 150.000000 | 3564.720000 | 11979.340000 |
telecom.pivot_table(values='Monthly Charge', index='Contract', columns='Customer Status', aggfunc='mean')
| Customer Status | Churned | Joined | Stayed |
|---|---|---|---|
| Contract | |||
| Month-to-Month | 71.784230 | 42.392279 | 62.284131 |
| One Year | 85.050904 | 35.622917 | 62.390404 |
| Two Year | 86.777083 | 57.695455 | 60.781081 |
telecom.pivot_table(values='Monthly Charge', index=['Contract', 'Multiple Lines'], columns='Customer Status', aggfunc='mean')
| Customer Status | Churned | Joined | Stayed | |
|---|---|---|---|---|
| Contract | Multiple Lines | |||
| Month-to-Month | No | 64.946474 | 41.740937 | 54.533750 |
| Yes | 86.868837 | 59.691250 | 79.809915 | |
| One Year | No | 77.111017 | 34.409091 | 50.719880 |
| Yes | 95.886022 | 72.100000 | 80.950090 | |
| Two Year | No | 67.695000 | 54.983333 | 44.332596 |
| Yes | 95.341429 | 69.900000 | 75.144545 |
telecom.pivot_table(values='Monthly Charge', index=['Contract', 'Internet Service'], columns='Customer Status', aggfunc='mean')
| Customer Status | Churned | Joined | Stayed | |
|---|---|---|---|---|
| Contract | Internet Service | |||
| Month-to-Month | No | 19.356566 | 19.972531 | 20.314202 |
| Yes | 75.119923 | 57.156504 | 70.645581 | |
| One Year | No | 21.138889 | 18.543333 | 20.339766 |
| Yes | 88.714650 | 64.088889 | 76.517436 | |
| Two Year | No | 22.700000 | 21.100000 | 21.266851 |
| Yes | 94.227907 | 68.458824 | 81.926715 |
telecom.pivot_table(values='Tenure in Months', index='Contract', columns='Customer Status', aggfunc='mean')
| Customer Status | Churned | Joined | Stayed |
|---|---|---|---|
| Contract | |||
| Month-to-Month | 14.016918 | 1.705882 | 25.042663 |
| One Year | 44.963855 | 1.875000 | 41.494853 |
| Two Year | 61.270833 | 1.818182 | 54.353006 |
Customer_Stayed=telecom[telecom['Customer Status']=='Stayed'].Age
Customer_Churned=telecom[telecom['Customer Status']=='Churned'].Age
Customer_Joined=telecom[telecom['Customer Status']=='Joined'].Age
plt.xlabel('Age')
plt.ylabel('Customers Numbers')
plt.hist([Customer_Stayed,Customer_Churned,Customer_Joined],label=['Stayed','Churned','Joined'])
plt.title('Customers Behavior ',fontweight ="bold")
plt.legend();
sns.histplot(telecom['Age']);
telecom_corr = telecom.select_dtypes(include='number')
corr_data = telecom_corr.corr()
plt.figure(figsize = (20,10))
sns.heatmap(corr_data, annot = True);
fig = px.histogram(telecom, x="Customer Status", template ='xgridoff',barmode = "group", title = "<b>Customer Status Distribution<b>")
fig.update_layout(width=400, height=400, bargap=0.2)
fig.show()
pd.crosstab(telecom['Customer Status'], telecom['Married']).plot(kind='bar');
pd.crosstab(telecom['Customer Status'], telecom['Gender']).plot(kind='bar')
<Axes: xlabel='Customer Status'>
sns.countplot(data=telecom, x='Customer Status', hue='Internet Type');
fig,axes=plt.subplots(7,2,figsize=(10,20))
sns.countplot(x="Gender",hue='Customer Status',data=telecom,ax=axes[0,0])
sns.countplot(x="Married",hue='Customer Status',data=telecom,ax=axes[0,1])
sns.countplot(x="Phone Service",hue='Customer Status',data=telecom,ax=axes[1,0])
sns.countplot(x="Multiple Lines",hue='Customer Status',data=telecom,ax=axes[1,1])
sns.countplot(x="Internet Service",hue='Customer Status',data=telecom,ax=axes[2,0])
sns.countplot(x="Online Security",hue='Customer Status',data=telecom,ax=axes[2,1])
sns.countplot(x="Online Backup",hue='Customer Status',data=telecom,ax=axes[3,0])
sns.countplot(x="Device Protection Plan",hue='Customer Status',data=telecom,ax=axes[3,1])
sns.countplot(x="Premium Tech Support",hue='Customer Status',data=telecom,ax=axes[4,0])
sns.countplot(x="Streaming TV",hue='Customer Status',data=telecom,ax=axes[4,1])
sns.countplot(x="Streaming Movies",hue='Customer Status',data=telecom,ax=axes[5,0])
sns.countplot(x="Streaming Music",hue='Customer Status',data=telecom,ax=axes[5,1])
sns.countplot(x="Unlimited Data",hue='Customer Status',data=telecom,ax=axes[6,0])
sns.countplot(x="Paperless Billing",hue='Customer Status',data=telecom,ax=axes[6,1])
plt.tight_layout()
plt.show()
fig, ax = plt.subplots(figsize=(12, 8))
order = telecom[telecom['Customer Status'] == 'Churned']['Churn Reason'].value_counts().index
sns.countplot(data=telecom[telecom['Customer Status'] == 'Churned'], y='Churn Reason', hue='Customer Status', ax=ax, order=order)
plt.xlabel('Count')
plt.ylabel('Churn Reason')
plt.title('Count of Customer by Churn Reasons')
plt.show()
Data Cleaning:
msno.matrix(telecom)
--------------------------------------------------------------------------- ValueError Traceback (most recent call last) Cell In[21], line 1 ----> 1 msno.matrix(telecom) File C:\Anaconda\Lib\site-packages\missingno\missingno.py:72, in matrix(df, filter, n, p, sort, figsize, width_ratios, color, fontsize, labels, sparkline, inline, freq, ax) 70 # Remove extraneous default visual elements. 71 ax0.set_aspect('auto') ---> 72 ax0.grid(b=False) 73 ax0.xaxis.tick_top() 74 ax0.xaxis.set_ticks_position('none') File C:\Anaconda\Lib\site-packages\matplotlib\axes\_base.py:3194, in _AxesBase.grid(self, visible, which, axis, **kwargs) 3192 _api.check_in_list(['x', 'y', 'both'], axis=axis) 3193 if axis in ['x', 'both']: -> 3194 self.xaxis.grid(visible, which=which, **kwargs) 3195 if axis in ['y', 'both']: 3196 self.yaxis.grid(visible, which=which, **kwargs) File C:\Anaconda\Lib\site-packages\matplotlib\axis.py:1698, in Axis.grid(self, visible, which, **kwargs) 1695 if which in ['major', 'both']: 1696 gridkw['gridOn'] = (not self._major_tick_kw['gridOn'] 1697 if visible is None else visible) -> 1698 self.set_tick_params(which='major', **gridkw) 1699 self.stale = True File C:\Anaconda\Lib\site-packages\matplotlib\axis.py:944, in Axis.set_tick_params(self, which, reset, **kwargs) 931 """ 932 Set appearance parameters for ticks, ticklabels, and gridlines. 933 (...) 941 gridlines. 942 """ 943 _api.check_in_list(['major', 'minor', 'both'], which=which) --> 944 kwtrans = self._translate_tick_params(kwargs) 946 # the kwargs are stored in self._major/minor_tick_kw so that any 947 # future new ticks will automatically get them 948 if reset: File C:\Anaconda\Lib\site-packages\matplotlib\axis.py:1088, in Axis._translate_tick_params(kw, reverse) 1086 for key in kw_: 1087 if key not in allowed_keys: -> 1088 raise ValueError( 1089 "keyword %s is not recognized; valid keywords are %s" 1090 % (key, allowed_keys)) 1091 kwtrans.update(kw_) 1092 return kwtrans ValueError: keyword grid_b is not recognized; valid keywords are ['size', 'width', 'color', 'tickdir', 'pad', 'labelsize', 'labelcolor', 'zorder', 'gridOn', 'tick1On', 'tick2On', 'label1On', 'label2On', 'length', 'direction', 'left', 'bottom', 'right', 'top', 'labelleft', 'labelbottom', 'labelright', 'labeltop', 'labelrotation', 'grid_agg_filter', 'grid_alpha', 'grid_animated', 'grid_antialiased', 'grid_clip_box', 'grid_clip_on', 'grid_clip_path', 'grid_color', 'grid_dash_capstyle', 'grid_dash_joinstyle', 'grid_dashes', 'grid_data', 'grid_drawstyle', 'grid_figure', 'grid_fillstyle', 'grid_gapcolor', 'grid_gid', 'grid_in_layout', 'grid_label', 'grid_linestyle', 'grid_linewidth', 'grid_marker', 'grid_markeredgecolor', 'grid_markeredgewidth', 'grid_markerfacecolor', 'grid_markerfacecoloralt', 'grid_markersize', 'grid_markevery', 'grid_mouseover', 'grid_path_effects', 'grid_picker', 'grid_pickradius', 'grid_rasterized', 'grid_sketch_params', 'grid_snap', 'grid_solid_capstyle', 'grid_solid_joinstyle', 'grid_transform', 'grid_url', 'grid_visible', 'grid_xdata', 'grid_ydata', 'grid_zorder', 'grid_aa', 'grid_c', 'grid_ds', 'grid_ls', 'grid_lw', 'grid_mec', 'grid_mew', 'grid_mfc', 'grid_mfcalt', 'grid_ms']
#Dropping the whole columns
telecom = telecom.drop('Churn Category',axis=1)
telecom = telecom.drop('Churn Reason',axis=1)
telecom = telecom.drop('Customer ID',axis=1)
#dropping the data values of "Joined" in the customer status
telecom = telecom.loc[telecom['Customer Status'] != 'Joined']
telecom.columns
Index(['Gender', 'Age', 'Married', 'Number of Dependents', 'City', 'Zip Code',
'Latitude', 'Longitude', 'Number of Referrals', 'Tenure in Months',
'Offer', 'Phone Service', 'Avg Monthly Long Distance Charges',
'Multiple Lines', 'Internet Service', 'Internet Type',
'Avg Monthly GB Download', 'Online Security', 'Online Backup',
'Device Protection Plan', 'Premium Tech Support', 'Streaming TV',
'Streaming Movies', 'Streaming Music', 'Unlimited Data', 'Contract',
'Paperless Billing', 'Payment Method', 'Monthly Charge',
'Total Charges', 'Total Refunds', 'Total Extra Data Charges',
'Total Long Distance Charges', 'Total Revenue', 'Customer Status'],
dtype='object')
a=telecom['Internet Service']
a.value_counts()
Internet Service Yes 5245 No 1344 Name: count, dtype: int64
b=telecom['Internet Type']
b.value_counts()
Internet Type Fiber Optic 2934 DSL 1537 Cable 774 Name: count, dtype: int64
Handling Null and Missing Values:
telecom['Internet Type'] = telecom['Internet Type'].fillna('No Internet')
telecom['Avg Monthly GB Download'] = telecom['Avg Monthly GB Download'].fillna(0)
telecom['Online Security'] = telecom['Online Security'].fillna('No Internet')
telecom['Online Backup'] = telecom['Online Backup'].fillna('No Internet')
telecom['Device Protection Plan'] = telecom['Device Protection Plan'].fillna('No Internet')
telecom['Premium Tech Support'] = telecom['Premium Tech Support'].fillna('No Internet')
telecom['Streaming TV'] = telecom['Streaming TV'].fillna('No Internet')
telecom['Streaming Movies'] = telecom['Streaming Movies'].fillna('No Internet')
telecom['Streaming Music'] = telecom['Streaming Music'].fillna('No Internet')
telecom['Unlimited Data'] = telecom['Unlimited Data'].fillna('No Internet')
telecom.isna().sum()
Gender 0 Age 0 Married 0 Number of Dependents 0 City 0 Zip Code 0 Latitude 0 Longitude 0 Number of Referrals 0 Tenure in Months 0 Offer 0 Phone Service 0 Avg Monthly Long Distance Charges 644 Multiple Lines 644 Internet Service 0 Internet Type 0 Avg Monthly GB Download 0 Online Security 0 Online Backup 0 Device Protection Plan 0 Premium Tech Support 0 Streaming TV 0 Streaming Movies 0 Streaming Music 0 Unlimited Data 0 Contract 0 Paperless Billing 0 Payment Method 0 Monthly Charge 0 Total Charges 0 Total Refunds 0 Total Extra Data Charges 0 Total Long Distance Charges 0 Total Revenue 0 Customer Status 0 dtype: int64
telecom['Phone Service'].value_counts()
Phone Service Yes 5945 No 644 Name: count, dtype: int64
telecom['Multiple Lines'] = telecom['Multiple Lines'].fillna('No Phone Service')
telecom['Avg Monthly Long Distance Charges'] = telecom['Avg Monthly Long Distance Charges'].fillna(0)
Dropping Duplicates:
#Dropping the duplicate values (if any)
telecom = telecom.drop_duplicates()
msno.matrix(telecom)
--------------------------------------------------------------------------- ValueError Traceback (most recent call last) Cell In[32], line 1 ----> 1 msno.matrix(telecom) File C:\Anaconda\Lib\site-packages\missingno\missingno.py:72, in matrix(df, filter, n, p, sort, figsize, width_ratios, color, fontsize, labels, sparkline, inline, freq, ax) 70 # Remove extraneous default visual elements. 71 ax0.set_aspect('auto') ---> 72 ax0.grid(b=False) 73 ax0.xaxis.tick_top() 74 ax0.xaxis.set_ticks_position('none') File C:\Anaconda\Lib\site-packages\matplotlib\axes\_base.py:3194, in _AxesBase.grid(self, visible, which, axis, **kwargs) 3192 _api.check_in_list(['x', 'y', 'both'], axis=axis) 3193 if axis in ['x', 'both']: -> 3194 self.xaxis.grid(visible, which=which, **kwargs) 3195 if axis in ['y', 'both']: 3196 self.yaxis.grid(visible, which=which, **kwargs) File C:\Anaconda\Lib\site-packages\matplotlib\axis.py:1698, in Axis.grid(self, visible, which, **kwargs) 1695 if which in ['major', 'both']: 1696 gridkw['gridOn'] = (not self._major_tick_kw['gridOn'] 1697 if visible is None else visible) -> 1698 self.set_tick_params(which='major', **gridkw) 1699 self.stale = True File C:\Anaconda\Lib\site-packages\matplotlib\axis.py:944, in Axis.set_tick_params(self, which, reset, **kwargs) 931 """ 932 Set appearance parameters for ticks, ticklabels, and gridlines. 933 (...) 941 gridlines. 942 """ 943 _api.check_in_list(['major', 'minor', 'both'], which=which) --> 944 kwtrans = self._translate_tick_params(kwargs) 946 # the kwargs are stored in self._major/minor_tick_kw so that any 947 # future new ticks will automatically get them 948 if reset: File C:\Anaconda\Lib\site-packages\matplotlib\axis.py:1088, in Axis._translate_tick_params(kw, reverse) 1086 for key in kw_: 1087 if key not in allowed_keys: -> 1088 raise ValueError( 1089 "keyword %s is not recognized; valid keywords are %s" 1090 % (key, allowed_keys)) 1091 kwtrans.update(kw_) 1092 return kwtrans ValueError: keyword grid_b is not recognized; valid keywords are ['size', 'width', 'color', 'tickdir', 'pad', 'labelsize', 'labelcolor', 'zorder', 'gridOn', 'tick1On', 'tick2On', 'label1On', 'label2On', 'length', 'direction', 'left', 'bottom', 'right', 'top', 'labelleft', 'labelbottom', 'labelright', 'labeltop', 'labelrotation', 'grid_agg_filter', 'grid_alpha', 'grid_animated', 'grid_antialiased', 'grid_clip_box', 'grid_clip_on', 'grid_clip_path', 'grid_color', 'grid_dash_capstyle', 'grid_dash_joinstyle', 'grid_dashes', 'grid_data', 'grid_drawstyle', 'grid_figure', 'grid_fillstyle', 'grid_gapcolor', 'grid_gid', 'grid_in_layout', 'grid_label', 'grid_linestyle', 'grid_linewidth', 'grid_marker', 'grid_markeredgecolor', 'grid_markeredgewidth', 'grid_markerfacecolor', 'grid_markerfacecoloralt', 'grid_markersize', 'grid_markevery', 'grid_mouseover', 'grid_path_effects', 'grid_picker', 'grid_pickradius', 'grid_rasterized', 'grid_sketch_params', 'grid_snap', 'grid_solid_capstyle', 'grid_solid_joinstyle', 'grid_transform', 'grid_url', 'grid_visible', 'grid_xdata', 'grid_ydata', 'grid_zorder', 'grid_aa', 'grid_c', 'grid_ds', 'grid_ls', 'grid_lw', 'grid_mec', 'grid_mew', 'grid_mfc', 'grid_mfcalt', 'grid_ms']
telecom.info()
<class 'pandas.core.frame.DataFrame'> Index: 6589 entries, 0 to 7042 Data columns (total 35 columns): # Column Non-Null Count Dtype --- ------ -------------- ----- 0 Gender 6589 non-null object 1 Age 6589 non-null int64 2 Married 6589 non-null object 3 Number of Dependents 6589 non-null int64 4 City 6589 non-null object 5 Zip Code 6589 non-null int64 6 Latitude 6589 non-null float64 7 Longitude 6589 non-null float64 8 Number of Referrals 6589 non-null int64 9 Tenure in Months 6589 non-null int64 10 Offer 6589 non-null object 11 Phone Service 6589 non-null object 12 Avg Monthly Long Distance Charges 6589 non-null float64 13 Multiple Lines 6589 non-null object 14 Internet Service 6589 non-null object 15 Internet Type 6589 non-null object 16 Avg Monthly GB Download 6589 non-null float64 17 Online Security 6589 non-null object 18 Online Backup 6589 non-null object 19 Device Protection Plan 6589 non-null object 20 Premium Tech Support 6589 non-null object 21 Streaming TV 6589 non-null object 22 Streaming Movies 6589 non-null object 23 Streaming Music 6589 non-null object 24 Unlimited Data 6589 non-null object 25 Contract 6589 non-null object 26 Paperless Billing 6589 non-null object 27 Payment Method 6589 non-null object 28 Monthly Charge 6589 non-null float64 29 Total Charges 6589 non-null float64 30 Total Refunds 6589 non-null float64 31 Total Extra Data Charges 6589 non-null int64 32 Total Long Distance Charges 6589 non-null float64 33 Total Revenue 6589 non-null float64 34 Customer Status 6589 non-null object dtypes: float64(9), int64(6), object(20) memory usage: 1.8+ MB
profile = ProfileReport(telecom)
Outlier Analysis:
colnames=['Age','Number of Dependents','Number of Referrals','Tenure in Months','Avg Monthly Long Distance Charges',
'Avg Monthly GB Download','Monthly Charge','Total Charges','Total Extra Data Charges','Total Long Distance Charges','Total Revenue']
fig, ax = plt.subplots(4,3, figsize = (15,15))
for i, subplot in zip(colnames, ax.flatten()):
sns.boxplot(x = 'Customer Status', y = i , data = telecom, ax = subplot)
sns.histplot(telecom['Number of Dependents']);
sns.histplot(telecom['Number of Referrals']);
sns.histplot(telecom['Total Extra Data Charges']);
S=telecom['Total Extra Data Charges'].value_counts()
S.head()
Total Extra Data Charges 0 5905 10 113 40 61 30 52 20 48 Name: count, dtype: int64
Data Preprocessing and Transformation:
categorical_columns = ['Gender', 'City', 'Zip Code', 'Married', 'Offer', 'Phone Service', 'Multiple Lines', 'Internet Service',
'Internet Type', 'Online Security', 'Online Backup', 'Device Protection Plan',
'Premium Tech Support', 'Streaming TV', 'Streaming Movies', 'Streaming Music',
'Unlimited Data', 'Contract', 'Paperless Billing', 'Payment Method', 'Customer Status']
for column in categorical_columns:
telecom[column] = pd.Categorical(telecom[column])
telecom.dtypes
Gender category Age int64 Married category Number of Dependents int64 City category Zip Code category Latitude float64 Longitude float64 Number of Referrals int64 Tenure in Months int64 Offer category Phone Service category Avg Monthly Long Distance Charges float64 Multiple Lines category Internet Service category Internet Type category Avg Monthly GB Download float64 Online Security category Online Backup category Device Protection Plan category Premium Tech Support category Streaming TV category Streaming Movies category Streaming Music category Unlimited Data category Contract category Paperless Billing category Payment Method category Monthly Charge float64 Total Charges float64 Total Refunds float64 Total Extra Data Charges int64 Total Long Distance Charges float64 Total Revenue float64 Customer Status category dtype: object
data_telecom = telecom.copy()
df_X = telecom.drop('Customer Status', axis=1)
df_y = telecom['Customer Status']
set_config(display='diagram') # shows the pipeline graphically when printed
num_pipeline = Pipeline([
('imputer', SimpleImputer(strategy='median')),
('scaler', StandardScaler())
])
cat_pipeline = Pipeline([
('imputer', SimpleImputer(strategy='most_frequent')),
('cat_encoder', OneHotEncoder(sparse=False)) # returns a
# regular matrix that can be combined easily with the data we get from
# the numeric pipeline
])
SimpleImputer.get_feature_names_out = StandardScaler.get_feature_names_out #
prep_pipeline = ColumnTransformer([
('num', num_pipeline, make_column_selector(dtype_include=np.number)),
('cat', cat_pipeline, make_column_selector(dtype_include='category'))
])
prep_pipeline
ColumnTransformer(transformers=[('num',
Pipeline(steps=[('imputer',
SimpleImputer(strategy='median')),
('scaler', StandardScaler())]),
<sklearn.compose._column_transformer.make_column_selector object at 0x00000241471A7F10>),
('cat',
Pipeline(steps=[('imputer',
SimpleImputer(strategy='most_frequent')),
('cat_encoder',
OneHotEncoder(sparse=False))]),
<sklearn.compose._column_transformer.make_column_selector object at 0x000002413F0B0F90>)])In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
ColumnTransformer(transformers=[('num',
Pipeline(steps=[('imputer',
SimpleImputer(strategy='median')),
('scaler', StandardScaler())]),
<sklearn.compose._column_transformer.make_column_selector object at 0x00000241471A7F10>),
('cat',
Pipeline(steps=[('imputer',
SimpleImputer(strategy='most_frequent')),
('cat_encoder',
OneHotEncoder(sparse=False))]),
<sklearn.compose._column_transformer.make_column_selector object at 0x000002413F0B0F90>)])<sklearn.compose._column_transformer.make_column_selector object at 0x00000241471A7F10>
SimpleImputer(strategy='median')
StandardScaler()
<sklearn.compose._column_transformer.make_column_selector object at 0x000002413F0B0F90>
SimpleImputer(strategy='most_frequent')
OneHotEncoder(sparse=False)
processed_X = prep_pipeline.fit_transform(df_X, df_y)
df_processed_X = pd.DataFrame(processed_X,
columns = prep_pipeline.get_feature_names_out(),
index = df_X.index)
print(df_processed_X.shape)
df_processed_X.head()
(6589, 2799)
| num__Age | num__Number of Dependents | num__Latitude | num__Longitude | num__Number of Referrals | num__Tenure in Months | num__Avg Monthly Long Distance Charges | num__Avg Monthly GB Download | num__Monthly Charge | num__Total Charges | ... | cat__Unlimited Data_No Internet | cat__Unlimited Data_Yes | cat__Contract_Month-to-Month | cat__Contract_One Year | cat__Contract_Two Year | cat__Paperless Billing_No | cat__Paperless Billing_Yes | cat__Payment Method_Bank Withdrawal | cat__Payment Method_Credit Card | cat__Payment Method_Mailed Check | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | -0.579600 | -0.491619 | -0.554996 | 0.351930 | -0.006926 | -1.063957 | 1.253468 | -0.238987 | 0.018307 | -0.811689 | ... | 0.0 | 1.0 | 0.0 | 1.0 | 0.0 | 0.0 | 1.0 | 0.0 | 1.0 | 0.0 |
| 1 | -0.045161 | -0.491619 | -0.824038 | 0.720843 | -0.668349 | -1.063957 | -0.796233 | -0.533037 | -2.219753 | -0.834158 | ... | 0.0 | 0.0 | 1.0 | 0.0 | 0.0 | 1.0 | 0.0 | 0.0 | 1.0 | 0.0 |
| 2 | 0.192368 | -0.491619 | -1.033094 | 0.851324 | -0.668349 | -1.272578 | 0.688345 | 0.447129 | 0.285202 | -0.949616 | ... | 0.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 1.0 | 1.0 | 0.0 | 0.0 |
| 3 | 1.855067 | -0.491619 | 0.734020 | -1.093819 | -0.337638 | -0.897060 | 0.311381 | -0.827087 | 1.060162 | -0.527161 | ... | 0.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 1.0 | 1.0 | 0.0 | 0.0 |
| 4 | 1.676921 | -0.491619 | -0.797613 | 0.314431 | 0.323785 | -1.314302 | -1.010255 | -0.484029 | 0.606762 | -0.955553 | ... | 0.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 1.0 | 0.0 | 1.0 | 0.0 |
5 rows × 2799 columns
Feature Selection:
Univariate Feature Selection:
selector = SelectKBest(f_classif, k=50)
selector.fit_transform(df_processed_X, df_y)
cols = selector.get_support(indices=True)
features_df_new = df_processed_X.iloc[:,cols]
features_df_new.head()
| num__Age | num__Number of Dependents | num__Number of Referrals | num__Tenure in Months | num__Monthly Charge | num__Total Charges | num__Total Long Distance Charges | num__Total Revenue | cat__Married_No | cat__Married_Yes | ... | cat__Streaming Music_No Internet | cat__Unlimited Data_No Internet | cat__Unlimited Data_Yes | cat__Contract_Month-to-Month | cat__Contract_One Year | cat__Contract_Two Year | cat__Paperless Billing_No | cat__Paperless Billing_Yes | cat__Payment Method_Bank Withdrawal | cat__Payment Method_Credit Card | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | -0.579600 | -0.491619 | -0.006926 | -1.063957 | 0.018307 | -0.811689 | -0.487965 | -0.791908 | 0.0 | 1.0 | ... | 0.0 | 0.0 | 1.0 | 0.0 | 1.0 | 0.0 | 0.0 | 1.0 | 0.0 | 1.0 |
| 1 | -0.045161 | -0.491619 | -0.668349 | -1.063957 | -2.219753 | -0.834158 | -0.822156 | -0.919617 | 1.0 | 0.0 | ... | 0.0 | 0.0 | 0.0 | 1.0 | 0.0 | 0.0 | 1.0 | 0.0 | 0.0 | 1.0 |
| 2 | 0.192368 | -0.491619 | -0.668349 | -1.272578 | 0.285202 | -0.949616 | -0.777187 | -0.987873 | 1.0 | 0.0 | ... | 0.0 | 0.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 1.0 | 1.0 | 0.0 |
| 3 | 1.855067 | -0.491619 | -0.337638 | -0.897060 | 1.060162 | -0.527161 | -0.511217 | -0.573051 | 0.0 | 1.0 | ... | 0.0 | 0.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 1.0 | 1.0 | 0.0 |
| 4 | 1.676921 | -0.491619 | 0.323785 | -1.314302 | 0.606762 | -0.955553 | -0.908919 | -1.031984 | 0.0 | 1.0 | ... | 0.0 | 0.0 | 1.0 | 1.0 | 0.0 | 0.0 | 0.0 | 1.0 | 0.0 | 1.0 |
5 rows × 50 columns
Principal Component Analysis:
pca = PCA(n_components=30)
pca_features = pca.fit_transform(features_df_new, df_y)
new_df = pca.transform(features_df_new)
Recurrent Feature Elimination:
random_forest = RandomForestClassifier()
rfe = RFE(random_forest, n_features_to_select=20)
rfe.fit(pca_features, df_y)
selected_features = rfe.support_
cols1 = rfe.get_support(indices=True)
df_3 = features_df_new.iloc[:,cols1]
df_3.columns
Index(['num__Age', 'num__Number of Dependents', 'num__Number of Referrals',
'num__Tenure in Months', 'num__Monthly Charge', 'num__Total Charges',
'cat__Married_Yes', 'cat__Zip Code_92122', 'cat__Offer_Offer A',
'cat__Offer_Offer B', 'cat__Offer_Offer E', 'cat__Internet Service_Yes',
'cat__Internet Type_DSL', 'cat__Internet Type_No Internet',
'cat__Online Security_No', 'cat__Online Security_No Internet',
'cat__Online Security_Yes', 'cat__Online Backup_No Internet',
'cat__Online Backup_Yes', 'cat__Device Protection Plan_No'],
dtype='object')
print(df_3.head())
num__Age num__Number of Dependents num__Number of Referrals \ 0 -0.579600 -0.491619 -0.006926 1 -0.045161 -0.491619 -0.668349 2 0.192368 -0.491619 -0.668349 3 1.855067 -0.491619 -0.337638 4 1.676921 -0.491619 0.323785 num__Tenure in Months num__Monthly Charge num__Total Charges \ 0 -1.063957 0.018307 -0.811689 1 -1.063957 -2.219753 -0.834158 2 -1.272578 0.285202 -0.949616 3 -0.897060 1.060162 -0.527161 4 -1.314302 0.606762 -0.955553 cat__Married_Yes cat__Zip Code_92122 cat__Offer_Offer A \ 0 1.0 0.0 0.0 1 0.0 0.0 0.0 2 0.0 0.0 0.0 3 1.0 0.0 0.0 4 1.0 0.0 0.0 cat__Offer_Offer B cat__Offer_Offer E cat__Internet Service_Yes \ 0 0.0 0.0 1.0 1 0.0 0.0 1.0 2 0.0 1.0 1.0 3 0.0 0.0 1.0 4 0.0 0.0 1.0 cat__Internet Type_DSL cat__Internet Type_No Internet \ 0 0.0 0.0 1 0.0 0.0 2 0.0 0.0 3 0.0 0.0 4 0.0 0.0 cat__Online Security_No cat__Online Security_No Internet \ 0 1.0 0.0 1 1.0 0.0 2 1.0 0.0 3 1.0 0.0 4 1.0 0.0 cat__Online Security_Yes cat__Online Backup_No Internet \ 0 0.0 0.0 1 0.0 0.0 2 0.0 0.0 3 0.0 0.0 4 0.0 0.0 cat__Online Backup_Yes cat__Device Protection Plan_No 0 1.0 1.0 1 0.0 1.0 2 0.0 0.0 3 1.0 0.0 4 0.0 1.0
data = df_3.values
Clustering:
DBSCAN Clustering:
# Perform DBSCAN clustering
dbscan = DBSCAN(eps=1, min_samples=3)
dbscan_labels = dbscan.fit_predict(data)
# Get the number of unique labels
num_unique_labels = len(np.unique(dbscan_labels))
if num_unique_labels > 1:
# Calculate silhouette score only if there are multiple labels
dbscan_score = silhouette_score(data, dbscan_labels)
print("DBSCAN Silhouette Score:", dbscan_score)
else:
print("DBSCAN: Only one label found. Unable to calculate silhouette score.")
DBSCAN Silhouette Score: -0.14134191647931937
KMeans Clustering:
# Perform KMeans clustering
kmeans = KMeans(n_clusters=3, random_state=24, n_init=10)
kmeans_labels = kmeans.fit_predict(data)
# Calculate silhouette score for KMeans clustering
kmeans_score = silhouette_score(data, kmeans_labels)
print("KMeans Silhouette Score:", kmeans_score)
KMeans Silhouette Score: 0.24763374148651673
CLARANS Clustering:
num_clusters = 5
num_local = 3
max_neighborhoods = 2
# Initializing CLARANS clustering object
clarans_instance = clarans(data.tolist(), num_clusters, num_local, max_neighborhoods)
# Perform CLARANS clustering
(ticks, result) = timedcall(clarans_instance.process);
print("Execution time : ", ticks, "\n");
Execution time : 1693.922286099987
#Get the cluster labels
clst = clarans_instance.get_clusters();
#Get the best mediods of the clusters
med = clarans_instance.get_medoids();
print("Index of clusters' points :\n",clst)
print("\nLabel class of each point :\n ",df_y)
print("\nIndex of the best medoids : ",med)
Index of clusters' points :
[[2, 8, 22, 52, 60, 84, 89, 123, 134, 148, 156, 162, 170, 188, 203, 228, 230, 246, 257, 263, 271, 322, 362, 389, 391, 397, 410, 416, 441, 445, 453, 484, 488, 489, 493, 509, 517, 521, 526, 537, 552, 561, 574, 598, 599, 627, 645, 649, 654, 661, 690, 693, 695, 697, 748, 776, 782, 785, 796, 797, 806, 817, 847, 857, 866, 887, 888, 903, 905, 906, 945, 946, 958, 965, 975, 986, 995, 1023, 1028, 1038, 1043, 1045, 1046, 1076, 1108, 1146, 1158, 1164, 1213, 1226, 1241, 1248, 1260, 1267, 1269, 1280, 1306, 1311, 1316, 1328, 1358, 1362, 1409, 1455, 1469, 1515, 1531, 1532, 1546, 1572, 1597, 1602, 1604, 1617, 1640, 1650, 1659, 1670, 1676, 1689, 1710, 1713, 1717, 1745, 1749, 1751, 1761, 1770, 1789, 1794, 1817, 1822, 1828, 1831, 1839, 1849, 1855, 1892, 1919, 1967, 1972, 1975, 1984, 1985, 1999, 2004, 2014, 2039, 2044, 2110, 2119, 2132, 2172, 2193, 2221, 2222, 2235, 2242, 2282, 2299, 2309, 2336, 2385, 2387, 2389, 2396, 2419, 2421, 2447, 2452, 2463, 2486, 2492, 2501, 2551, 2562, 2575, 2596, 2598, 2623, 2627, 2657, 2663, 2675, 2682, 2721, 2739, 2742, 2743, 2753, 2758, 2764, 2770, 2775, 2780, 2787, 2800, 2820, 2835, 2847, 2853, 2854, 2855, 2870, 2885, 2892, 2918, 2922, 2934, 2936, 2944, 2951, 2966, 2974, 2978, 2980, 2982, 2987, 3001, 3069, 3110, 3114, 3123, 3125, 3131, 3134, 3141, 3142, 3150, 3188, 3192, 3194, 3221, 3225, 3237, 3238, 3266, 3306, 3310, 3319, 3320, 3352, 3371, 3396, 3399, 3403, 3449, 3468, 3483, 3492, 3543, 3553, 3569, 3586, 3591, 3592, 3645, 3646, 3659, 3704, 3707, 3709, 3742, 3756, 3759, 3760, 3785, 3790, 3791, 3795, 3797, 3811, 3819, 3822, 3840, 3856, 3872, 3879, 3883, 3891, 3933, 3946, 4033, 4048, 4068, 4072, 4073, 4079, 4083, 4100, 4114, 4136, 4150, 4151, 4162, 4163, 4186, 4219, 4225, 4231, 4234, 4256, 4262, 4274, 4308, 4338, 4372, 4392, 4398, 4406, 4421, 4430, 4456, 4459, 4461, 4505, 4507, 4510, 4522, 4550, 4569, 4589, 4599, 4613, 4621, 4641, 4699, 4727, 4762, 4769, 4793, 4798, 4830, 4834, 4844, 4852, 4868, 4869, 4889, 4892, 4901, 4904, 4932, 4933, 4990, 4998, 5001, 5008, 5021, 5026, 5027, 5030, 5036, 5106, 5145, 5177, 5183, 5190, 5197, 5203, 5209, 5212, 5213, 5216, 5233, 5236, 5239, 5250, 5251, 5253, 5279, 5285, 5300, 5312, 5349, 5368, 5372, 5399, 5468, 5482, 5509, 5513, 5529, 5539, 5547, 5550, 5556, 5560, 5570, 5571, 5583, 5658, 5683, 5684, 5707, 5714, 5721, 5755, 5771, 5776, 5781, 5819, 5824, 5842, 5843, 5871, 5910, 5912, 5939, 5940, 5956, 5957, 5960, 6049, 6078, 6094, 6108, 6117, 6123, 6128, 6137, 6151, 6153, 6175, 6247, 6256, 6266, 6273, 6280, 6298, 6342, 6382, 6441, 6445, 6451, 6512, 6516, 6521, 6524, 6529, 6556, 6574, 6583], [1, 12, 18, 23, 26, 32, 38, 67, 75, 86, 90, 92, 110, 114, 122, 125, 131, 165, 177, 178, 180, 182, 185, 186, 194, 195, 200, 212, 216, 240, 245, 265, 268, 270, 274, 289, 293, 311, 314, 315, 319, 325, 327, 329, 330, 332, 351, 360, 375, 380, 382, 400, 402, 404, 425, 426, 436, 457, 461, 463, 464, 478, 486, 500, 506, 507, 515, 520, 532, 535, 542, 548, 564, 571, 579, 588, 597, 606, 619, 626, 639, 641, 642, 643, 648, 656, 660, 666, 674, 678, 681, 683, 684, 700, 706, 707, 716, 728, 731, 734, 740, 742, 749, 764, 773, 775, 779, 780, 789, 794, 804, 809, 812, 813, 815, 818, 830, 854, 861, 863, 864, 870, 871, 876, 877, 881, 885, 890, 892, 894, 896, 912, 914, 916, 919, 932, 935, 937, 939, 942, 955, 977, 980, 994, 1010, 1011, 1012, 1013, 1030, 1032, 1037, 1042, 1047, 1056, 1057, 1058, 1060, 1061, 1068, 1070, 1078, 1079, 1082, 1092, 1116, 1118, 1128, 1129, 1135, 1141, 1144, 1149, 1154, 1175, 1176, 1177, 1178, 1182, 1184, 1190, 1196, 1208, 1210, 1211, 1215, 1221, 1234, 1239, 1240, 1242, 1249, 1251, 1252, 1270, 1272, 1276, 1279, 1325, 1326, 1339, 1344, 1345, 1348, 1360, 1374, 1389, 1400, 1410, 1420, 1422, 1430, 1435, 1437, 1456, 1473, 1487, 1490, 1491, 1493, 1494, 1503, 1505, 1529, 1541, 1543, 1557, 1561, 1566, 1568, 1570, 1574, 1582, 1606, 1612, 1618, 1645, 1646, 1661, 1662, 1663, 1665, 1668, 1672, 1673, 1680, 1690, 1723, 1725, 1726, 1742, 1743, 1759, 1760, 1763, 1764, 1767, 1775, 1780, 1781, 1782, 1783, 1784, 1786, 1799, 1803, 1805, 1809, 1811, 1812, 1819, 1824, 1825, 1842, 1844, 1850, 1859, 1876, 1889, 1904, 1909, 1912, 1913, 1929, 1932, 1956, 1980, 1982, 1989, 1990, 1994, 1996, 2021, 2022, 2030, 2034, 2041, 2053, 2055, 2064, 2067, 2068, 2097, 2105, 2106, 2107, 2108, 2117, 2127, 2129, 2145, 2154, 2164, 2166, 2171, 2173, 2177, 2181, 2184, 2196, 2224, 2226, 2229, 2239, 2252, 2261, 2266, 2267, 2288, 2291, 2293, 2304, 2306, 2316, 2328, 2329, 2330, 2337, 2341, 2351, 2353, 2354, 2357, 2359, 2376, 2378, 2383, 2384, 2390, 2391, 2395, 2402, 2412, 2425, 2426, 2427, 2430, 2448, 2449, 2450, 2451, 2453, 2455, 2470, 2475, 2487, 2488, 2494, 2498, 2502, 2503, 2512, 2518, 2519, 2523, 2525, 2528, 2532, 2539, 2546, 2548, 2573, 2581, 2590, 2597, 2603, 2608, 2619, 2631, 2647, 2653, 2660, 2671, 2672, 2683, 2692, 2703, 2709, 2716, 2718, 2728, 2732, 2736, 2737, 2741, 2763, 2793, 2795, 2801, 2802, 2807, 2815, 2817, 2819, 2822, 2826, 2832, 2834, 2841, 2857, 2860, 2868, 2873, 2876, 2886, 2888, 2895, 2897, 2903, 2904, 2906, 2909, 2913, 2920, 2943, 2955, 2963, 2967, 2973, 2977, 2994, 2996, 3000, 3009, 3011, 3019, 3023, 3025, 3030, 3037, 3039, 3042, 3049, 3050, 3057, 3070, 3078, 3080, 3085, 3087, 3094, 3101, 3103, 3104, 3105, 3112, 3113, 3116, 3117, 3118, 3129, 3147, 3148, 3151, 3158, 3177, 3197, 3200, 3204, 3208, 3217, 3219, 3226, 3236, 3243, 3244, 3245, 3247, 3248, 3258, 3264, 3272, 3277, 3278, 3293, 3295, 3299, 3305, 3307, 3309, 3315, 3316, 3318, 3328, 3332, 3335, 3341, 3344, 3353, 3372, 3381, 3382, 3383, 3387, 3398, 3400, 3409, 3413, 3420, 3424, 3426, 3445, 3446, 3458, 3472, 3491, 3504, 3517, 3519, 3521, 3546, 3560, 3566, 3576, 3581, 3583, 3612, 3631, 3634, 3636, 3640, 3641, 3644, 3648, 3654, 3660, 3669, 3673, 3674, 3675, 3676, 3678, 3680, 3682, 3683, 3686, 3687, 3696, 3701, 3706, 3712, 3713, 3714, 3722, 3724, 3725, 3731, 3737, 3740, 3744, 3768, 3770, 3774, 3776, 3782, 3784, 3809, 3817, 3821, 3823, 3828, 3846, 3857, 3858, 3863, 3869, 3878, 3885, 3899, 3908, 3916, 3926, 3929, 3938, 3944, 3949, 3950, 3973, 3976, 3999, 4002, 4004, 4008, 4010, 4021, 4031, 4032, 4041, 4049, 4056, 4057, 4061, 4065, 4066, 4075, 4081, 4084, 4086, 4111, 4112, 4124, 4125, 4137, 4141, 4154, 4181, 4188, 4199, 4200, 4207, 4227, 4230, 4235, 4242, 4246, 4247, 4253, 4260, 4268, 4280, 4281, 4287, 4290, 4294, 4304, 4315, 4317, 4321, 4324, 4326, 4335, 4336, 4343, 4344, 4347, 4355, 4360, 4362, 4370, 4375, 4379, 4382, 4389, 4395, 4397, 4403, 4410, 4431, 4433, 4442, 4453, 4458, 4464, 4465, 4475, 4493, 4497, 4500, 4516, 4532, 4535, 4537, 4539, 4555, 4578, 4595, 4601, 4604, 4605, 4611, 4618, 4622, 4633, 4636, 4642, 4650, 4654, 4656, 4663, 4680, 4692, 4694, 4696, 4698, 4703, 4706, 4712, 4718, 4721, 4746, 4749, 4758, 4764, 4783, 4785, 4795, 4807, 4811, 4816, 4817, 4818, 4819, 4841, 4847, 4851, 4864, 4877, 4883, 4885, 4890, 4894, 4899, 4902, 4903, 4920, 4921, 4925, 4937, 4942, 4945, 4947, 4959, 4974, 4987, 4992, 4999, 5000, 5002, 5012, 5015, 5019, 5022, 5028, 5033, 5049, 5051, 5055, 5056, 5061, 5062, 5066, 5071, 5072, 5081, 5083, 5088, 5091, 5095, 5096, 5098, 5105, 5121, 5135, 5140, 5146, 5147, 5153, 5158, 5159, 5165, 5173, 5191, 5192, 5205, 5206, 5229, 5231, 5235, 5238, 5242, 5248, 5249, 5266, 5273, 5284, 5286, 5293, 5294, 5303, 5311, 5313, 5314, 5316, 5327, 5340, 5347, 5350, 5352, 5356, 5362, 5365, 5366, 5369, 5370, 5374, 5375, 5376, 5378, 5383, 5396, 5397, 5398, 5403, 5405, 5407, 5411, 5423, 5424, 5433, 5439, 5440, 5443, 5455, 5462, 5481, 5489, 5493, 5495, 5496, 5497, 5499, 5500, 5502, 5520, 5522, 5526, 5528, 5531, 5537, 5542, 5555, 5557, 5564, 5569, 5585, 5589, 5600, 5604, 5607, 5613, 5615, 5620, 5624, 5628, 5636, 5643, 5666, 5667, 5669, 5680, 5686, 5690, 5712, 5717, 5720, 5728, 5734, 5735, 5737, 5759, 5760, 5769, 5770, 5777, 5780, 5789, 5801, 5805, 5806, 5807, 5816, 5818, 5832, 5839, 5861, 5877, 5880, 5881, 5885, 5892, 5901, 5907, 5909, 5911, 5926, 5942, 5943, 5944, 5954, 5958, 5963, 5971, 5979, 5981, 6007, 6011, 6016, 6025, 6037, 6046, 6048, 6063, 6074, 6085, 6086, 6088, 6091, 6096, 6104, 6110, 6130, 6138, 6140, 6145, 6149, 6158, 6165, 6168, 6169, 6177, 6182, 6189, 6197, 6214, 6216, 6220, 6225, 6227, 6239, 6244, 6252, 6263, 6270, 6276, 6278, 6283, 6293, 6303, 6305, 6315, 6316, 6339, 6340, 6350, 6354, 6370, 6371, 6393, 6394, 6401, 6405, 6408, 6410, 6417, 6419, 6429, 6447, 6448, 6452, 6453, 6462, 6467, 6469, 6478, 6479, 6485, 6494, 6496, 6506, 6507, 6510, 6545, 6549, 6569, 6572, 6584, 6585], [5, 25, 27, 28, 33, 36, 37, 40, 94, 101, 111, 126, 133, 144, 151, 153, 157, 159, 160, 161, 166, 173, 214, 218, 223, 233, 248, 249, 294, 301, 305, 307, 310, 312, 321, 331, 347, 361, 376, 383, 390, 423, 424, 452, 481, 492, 498, 514, 525, 534, 545, 551, 553, 578, 617, 624, 636, 658, 664, 665, 671, 675, 676, 694, 701, 713, 719, 725, 759, 778, 787, 793, 811, 828, 832, 835, 837, 841, 842, 849, 865, 872, 883, 886, 908, 911, 915, 920, 921, 922, 924, 951, 972, 976, 979, 991, 993, 1000, 1008, 1040, 1041, 1089, 1093, 1101, 1104, 1110, 1117, 1121, 1127, 1133, 1147, 1155, 1157, 1161, 1166, 1173, 1195, 1202, 1206, 1236, 1253, 1277, 1283, 1284, 1288, 1322, 1323, 1337, 1355, 1359, 1368, 1376, 1377, 1381, 1387, 1413, 1414, 1415, 1427, 1429, 1431, 1432, 1438, 1449, 1452, 1457, 1459, 1477, 1479, 1489, 1506, 1511, 1517, 1549, 1553, 1555, 1586, 1587, 1615, 1624, 1634, 1636, 1639, 1643, 1669, 1671, 1683, 1684, 1695, 1703, 1732, 1746, 1747, 1753, 1754, 1756, 1795, 1802, 1857, 1865, 1866, 1869, 1877, 1881, 1885, 1900, 1902, 1914, 1922, 1936, 1955, 1981, 1993, 2001, 2013, 2016, 2025, 2029, 2060, 2061, 2072, 2092, 2125, 2131, 2133, 2137, 2180, 2182, 2187, 2189, 2198, 2218, 2232, 2237, 2246, 2268, 2272, 2274, 2279, 2294, 2315, 2348, 2362, 2363, 2371, 2393, 2401, 2405, 2409, 2431, 2433, 2439, 2446, 2467, 2471, 2478, 2490, 2511, 2513, 2515, 2531, 2541, 2545, 2576, 2579, 2609, 2621, 2625, 2632, 2637, 2643, 2646, 2649, 2651, 2701, 2733, 2734, 2735, 2740, 2762, 2767, 2769, 2779, 2782, 2789, 2794, 2799, 2804, 2806, 2808, 2823, 2831, 2840, 2845, 2862, 2871, 2872, 2882, 2884, 2899, 2907, 2910, 2911, 2917, 2925, 2937, 2939, 2941, 2969, 2989, 2990, 3018, 3021, 3022, 3028, 3054, 3060, 3064, 3077, 3079, 3092, 3100, 3115, 3132, 3135, 3136, 3139, 3153, 3159, 3160, 3171, 3205, 3207, 3209, 3210, 3216, 3220, 3254, 3261, 3267, 3273, 3283, 3287, 3290, 3297, 3300, 3331, 3336, 3343, 3355, 3392, 3406, 3418, 3421, 3434, 3435, 3453, 3473, 3480, 3484, 3505, 3516, 3531, 3541, 3547, 3549, 3563, 3587, 3607, 3608, 3618, 3638, 3649, 3655, 3662, 3665, 3685, 3689, 3703, 3720, 3747, 3769, 3775, 3777, 3818, 3831, 3839, 3849, 3854, 3859, 3865, 3871, 3876, 3884, 3888, 3890, 3901, 3915, 3918, 3928, 3931, 3937, 3953, 3955, 3957, 3990, 3998, 4012, 4022, 4025, 4046, 4064, 4074, 4101, 4109, 4130, 4132, 4140, 4143, 4168, 4169, 4204, 4224, 4265, 4279, 4291, 4298, 4306, 4311, 4314, 4318, 4322, 4328, 4331, 4333, 4341, 4367, 4374, 4383, 4391, 4396, 4416, 4419, 4428, 4439, 4448, 4452, 4498, 4499, 4504, 4508, 4515, 4517, 4525, 4528, 4548, 4552, 4554, 4557, 4561, 4567, 4580, 4615, 4626, 4627, 4635, 4639, 4662, 4666, 4674, 4676, 4687, 4719, 4723, 4729, 4730, 4772, 4775, 4776, 4779, 4804, 4805, 4822, 4823, 4836, 4837, 4839, 4855, 4860, 4870, 4871, 4873, 4876, 4878, 4881, 4884, 4909, 4912, 4931, 4940, 4951, 4954, 4965, 4971, 4981, 4984, 5003, 5031, 5038, 5040, 5068, 5073, 5080, 5082, 5094, 5099, 5102, 5107, 5109, 5119, 5129, 5136, 5144, 5152, 5155, 5160, 5163, 5169, 5186, 5207, 5215, 5232, 5240, 5252, 5255, 5297, 5299, 5307, 5317, 5332, 5342, 5343, 5345, 5348, 5363, 5408, 5412, 5416, 5417, 5426, 5427, 5448, 5450, 5454, 5457, 5470, 5476, 5494, 5533, 5538, 5595, 5609, 5639, 5642, 5646, 5674, 5687, 5689, 5699, 5710, 5723, 5740, 5742, 5761, 5763, 5767, 5778, 5783, 5786, 5792, 5808, 5826, 5831, 5869, 5887, 5902, 5904, 5914, 5918, 5921, 5923, 5930, 5947, 5952, 5970, 5977, 5989, 5991, 5996, 6004, 6018, 6022, 6039, 6045, 6059, 6062, 6065, 6066, 6070, 6083, 6092, 6099, 6105, 6133, 6135, 6144, 6146, 6166, 6188, 6202, 6204, 6205, 6207, 6209, 6215, 6229, 6240, 6241, 6254, 6262, 6287, 6290, 6294, 6300, 6301, 6306, 6313, 6319, 6321, 6329, 6356, 6359, 6362, 6391, 6418, 6421, 6426, 6440, 6456, 6459, 6472, 6480, 6486, 6487, 6489, 6492, 6502, 6531, 6546, 6560, 6563, 6580], [10, 19, 29, 31, 54, 69, 71, 77, 119, 129, 164, 174, 191, 219, 220, 229, 244, 252, 255, 256, 272, 283, 285, 292, 304, 318, 320, 344, 352, 354, 372, 385, 387, 398, 401, 405, 406, 414, 430, 455, 466, 473, 474, 475, 491, 501, 503, 504, 508, 510, 513, 516, 530, 531, 539, 540, 584, 585, 604, 605, 613, 628, 630, 652, 670, 673, 686, 688, 691, 696, 732, 753, 754, 758, 783, 792, 802, 805, 807, 808, 829, 838, 843, 851, 884, 897, 901, 933, 936, 938, 957, 960, 978, 985, 990, 992, 1005, 1006, 1022, 1025, 1035, 1048, 1055, 1059, 1066, 1083, 1087, 1098, 1120, 1125, 1126, 1130, 1132, 1139, 1152, 1159, 1168, 1169, 1179, 1185, 1194, 1218, 1219, 1220, 1225, 1231, 1235, 1245, 1255, 1256, 1258, 1262, 1271, 1293, 1303, 1312, 1315, 1318, 1331, 1338, 1350, 1363, 1365, 1366, 1367, 1380, 1412, 1426, 1436, 1444, 1470, 1482, 1492, 1498, 1504, 1509, 1510, 1540, 1558, 1560, 1562, 1564, 1577, 1598, 1600, 1601, 1614, 1629, 1637, 1644, 1648, 1653, 1656, 1660, 1678, 1686, 1698, 1707, 1721, 1728, 1739, 1757, 1776, 1791, 1797, 1798, 1807, 1808, 1810, 1813, 1820, 1836, 1856, 1870, 1872, 1874, 1903, 1918, 1927, 1933, 1944, 1958, 1964, 1970, 1995, 2012, 2017, 2024, 2026, 2043, 2046, 2051, 2059, 2065, 2073, 2091, 2114, 2130, 2135, 2138, 2139, 2142, 2146, 2151, 2156, 2191, 2194, 2219, 2223, 2243, 2244, 2245, 2254, 2255, 2259, 2262, 2273, 2284, 2285, 2305, 2308, 2311, 2325, 2326, 2346, 2358, 2365, 2369, 2373, 2380, 2382, 2399, 2403, 2410, 2414, 2415, 2423, 2443, 2462, 2466, 2469, 2472, 2476, 2496, 2506, 2530, 2533, 2537, 2540, 2554, 2557, 2569, 2578, 2586, 2589, 2592, 2593, 2595, 2600, 2602, 2607, 2626, 2658, 2662, 2666, 2686, 2690, 2694, 2700, 2710, 2711, 2712, 2715, 2731, 2744, 2751, 2771, 2786, 2797, 2814, 2830, 2878, 2889, 2915, 2926, 2931, 2933, 2942, 2960, 2976, 2981, 2983, 3002, 3003, 3014, 3020, 3032, 3063, 3073, 3074, 3075, 3083, 3089, 3096, 3120, 3122, 3138, 3143, 3166, 3182, 3183, 3206, 3212, 3218, 3246, 3262, 3274, 3280, 3285, 3288, 3289, 3317, 3339, 3347, 3348, 3356, 3362, 3386, 3407, 3408, 3416, 3425, 3438, 3439, 3442, 3451, 3456, 3465, 3476, 3482, 3506, 3518, 3523, 3532, 3538, 3552, 3572, 3579, 3593, 3616, 3620, 3628, 3629, 3632, 3656, 3668, 3670, 3677, 3679, 3681, 3690, 3698, 3710, 3715, 3721, 3723, 3727, 3732, 3734, 3750, 3751, 3754, 3764, 3779, 3780, 3798, 3799, 3804, 3826, 3835, 3844, 3848, 3866, 3870, 3873, 3881, 3882, 3896, 3904, 3911, 3917, 3923, 3960, 3966, 3969, 3975, 3978, 3985, 3989, 3992, 3993, 4013, 4017, 4019, 4026, 4029, 4039, 4050, 4053, 4067, 4071, 4076, 4089, 4094, 4102, 4122, 4126, 4157, 4173, 4191, 4208, 4209, 4213, 4229, 4250, 4263, 4278, 4283, 4297, 4305, 4323, 4334, 4346, 4354, 4363, 4368, 4387, 4393, 4399, 4424, 4471, 4478, 4485, 4486, 4491, 4513, 4526, 4545, 4551, 4570, 4574, 4583, 4592, 4597, 4602, 4603, 4606, 4608, 4623, 4634, 4638, 4664, 4665, 4669, 4670, 4682, 4705, 4709, 4711, 4714, 4717, 4722, 4724, 4732, 4735, 4739, 4740, 4774, 4778, 4791, 4797, 4809, 4813, 4824, 4825, 4859, 4862, 4875, 4882, 4917, 4924, 4930, 4936, 4944, 4949, 4960, 4975, 5005, 5014, 5039, 5041, 5046, 5048, 5060, 5070, 5077, 5117, 5122, 5125, 5176, 5180, 5181, 5187, 5189, 5226, 5243, 5269, 5272, 5274, 5277, 5288, 5306, 5308, 5330, 5335, 5341, 5344, 5351, 5364, 5382, 5388, 5428, 5434, 5447, 5461, 5466, 5471, 5480, 5485, 5486, 5487, 5488, 5490, 5512, 5514, 5516, 5517, 5536, 5549, 5554, 5563, 5574, 5581, 5598, 5602, 5605, 5610, 5616, 5617, 5621, 5638, 5640, 5648, 5651, 5668, 5671, 5678, 5691, 5715, 5747, 5749, 5754, 5758, 5764, 5772, 5779, 5782, 5785, 5796, 5800, 5810, 5820, 5834, 5845, 5856, 5857, 5858, 5859, 5868, 5875, 5879, 5883, 5884, 5889, 5897, 5898, 5919, 5922, 5924, 5969, 5986, 5988, 5990, 5993, 6000, 6001, 6003, 6012, 6029, 6035, 6040, 6044, 6050, 6057, 6132, 6134, 6163, 6172, 6178, 6181, 6186, 6190, 6219, 6223, 6260, 6264, 6267, 6269, 6285, 6286, 6295, 6304, 6323, 6325, 6333, 6343, 6344, 6345, 6348, 6351, 6355, 6360, 6363, 6369, 6372, 6374, 6395, 6409, 6420, 6428, 6455, 6457, 6463, 6466, 6476, 6500, 6517, 6520, 6525, 6534, 6552, 6564, 6570, 6575, 6576], [0, 3, 4, 6, 7, 9, 11, 13, 14, 15, 16, 17, 20, 21, 24, 30, 34, 35, 39, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 53, 55, 56, 57, 58, 59, 61, 62, 63, 64, 65, 66, 68, 70, 72, 73, 74, 76, 78, 79, 80, 81, 82, 83, 85, 87, 88, 91, 93, 95, 96, 97, 98, 99, 100, 102, 103, 104, 105, 106, 107, 108, 109, 112, 113, 115, 116, 117, 118, 120, 121, 124, 127, 128, 130, 132, 135, 136, 137, 138, 139, 140, 141, 142, 143, 145, 146, 147, 149, 150, 152, 154, 155, 158, 163, 167, 168, 169, 171, 172, 175, 176, 179, 181, 183, 184, 187, 189, 190, 192, 193, 196, 197, 198, 199, 201, 202, 204, 205, 206, 207, 208, 209, 210, 211, 213, 215, 217, 221, 222, 224, 225, 226, 227, 231, 232, 234, 235, 236, 237, 238, 239, 241, 242, 243, 247, 250, 251, 253, 254, 258, 259, 260, 261, 262, 264, 266, 267, 269, 273, 275, 276, 277, 278, 279, 280, 281, 282, 284, 286, 287, 288, 290, 291, 295, 296, 297, 298, 299, 300, 302, 303, 306, 308, 309, 313, 316, 317, 323, 324, 326, 328, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 345, 346, 348, 349, 350, 353, 355, 356, 357, 358, 359, 363, 364, 365, 366, 367, 368, 369, 370, 371, 373, 374, 377, 378, 379, 381, 384, 386, 388, 392, 393, 394, 395, 396, 399, 403, 407, 408, 409, 411, 412, 413, 415, 417, 418, 419, 420, 421, 422, 427, 428, 429, 431, 432, 433, 434, 435, 437, 438, 439, 440, 442, 443, 444, 446, 447, 448, 449, 450, 451, 454, 456, 458, 459, 460, 462, 465, 467, 468, 469, 470, 471, 472, 476, 477, 479, 480, 482, 483, 485, 487, 490, 494, 495, 496, 497, 499, 502, 505, 511, 512, 518, 519, 522, 523, 524, 527, 528, 529, 533, 536, 538, 541, 543, 544, 546, 547, 549, 550, 554, 555, 556, 557, 558, 559, 560, 562, 563, 565, 566, 567, 568, 569, 570, 572, 573, 575, 576, 577, 580, 581, 582, 583, 586, 587, 589, 590, 591, 592, 593, 594, 595, 596, 600, 601, 602, 603, 607, 608, 609, 610, 611, 612, 614, 615, 616, 618, 620, 621, 622, 623, 625, 629, 631, 632, 633, 634, 635, 637, 638, 640, 644, 646, 647, 650, 651, 653, 655, 657, 659, 662, 663, 667, 668, 669, 672, 677, 679, 680, 682, 685, 687, 689, 692, 698, 699, 702, 703, 704, 705, 708, 709, 710, 711, 712, 714, 715, 717, 718, 720, 721, 722, 723, 724, 726, 727, 729, 730, 733, 735, 736, 737, 738, 739, 741, 743, 744, 745, 746, 747, 750, 751, 752, 755, 756, 757, 760, 761, 762, 763, 765, 766, 767, 768, 769, 770, 771, 772, 774, 777, 781, 784, 786, 788, 790, 791, 795, 798, 799, 800, 801, 803, 810, 814, 816, 819, 820, 821, 822, 823, 824, 825, 826, 827, 831, 833, 834, 836, 839, 840, 844, 845, 846, 848, 850, 852, 853, 855, 856, 858, 859, 860, 862, 867, 868, 869, 873, 874, 875, 878, 879, 880, 882, 889, 891, 893, 895, 898, 899, 900, 902, 904, 907, 909, 910, 913, 917, 918, 923, 925, 926, 927, 928, 929, 930, 931, 934, 940, 941, 943, 944, 947, 948, 949, 950, 952, 953, 954, 956, 959, 961, 962, 963, 964, 966, 967, 968, 969, 970, 971, 973, 974, 981, 982, 983, 984, 987, 988, 989, 996, 997, 998, 999, 1001, 1002, 1003, 1004, 1007, 1009, 1014, 1015, 1016, 1017, 1018, 1019, 1020, 1021, 1024, 1026, 1027, 1029, 1031, 1033, 1034, 1036, 1039, 1044, 1049, 1050, 1051, 1052, 1053, 1054, 1062, 1063, 1064, 1065, 1067, 1069, 1071, 1072, 1073, 1074, 1075, 1077, 1080, 1081, 1084, 1085, 1086, 1088, 1090, 1091, 1094, 1095, 1096, 1097, 1099, 1100, 1102, 1103, 1105, 1106, 1107, 1109, 1111, 1112, 1113, 1114, 1115, 1119, 1122, 1123, 1124, 1131, 1134, 1136, 1137, 1138, 1140, 1142, 1143, 1145, 1148, 1150, 1151, 1153, 1156, 1160, 1162, 1163, 1165, 1167, 1170, 1171, 1172, 1174, 1180, 1181, 1183, 1186, 1187, 1188, 1189, 1191, 1192, 1193, 1197, 1198, 1199, 1200, 1201, 1203, 1204, 1205, 1207, 1209, 1212, 1214, 1216, 1217, 1222, 1223, 1224, 1227, 1228, 1229, 1230, 1232, 1233, 1237, 1238, 1243, 1244, 1246, 1247, 1250, 1254, 1257, 1259, 1261, 1263, 1264, 1265, 1266, 1268, 1273, 1274, 1275, 1278, 1281, 1282, 1285, 1286, 1287, 1289, 1290, 1291, 1292, 1294, 1295, 1296, 1297, 1298, 1299, 1300, 1301, 1302, 1304, 1305, 1307, 1308, 1309, 1310, 1313, 1314, 1317, 1319, 1320, 1321, 1324, 1327, 1329, 1330, 1332, 1333, 1334, 1335, 1336, 1340, 1341, 1342, 1343, 1346, 1347, 1349, 1351, 1352, 1353, 1354, 1356, 1357, 1361, 1364, 1369, 1370, 1371, 1372, 1373, 1375, 1378, 1379, 1382, 1383, 1384, 1385, 1386, 1388, 1390, 1391, 1392, 1393, 1394, 1395, 1396, 1397, 1398, 1399, 1401, 1402, 1403, 1404, 1405, 1406, 1407, 1408, 1411, 1416, 1417, 1418, 1419, 1421, 1423, 1424, 1425, 1428, 1433, 1434, 1439, 1440, 1441, 1442, 1443, 1445, 1446, 1447, 1448, 1450, 1451, 1453, 1454, 1458, 1460, 1461, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1471, 1472, 1474, 1475, 1476, 1478, 1480, 1481, 1483, 1484, 1485, 1486, 1488, 1495, 1496, 1497, 1499, 1500, 1501, 1502, 1507, 1508, 1512, 1513, 1514, 1516, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1526, 1527, 1528, 1530, 1533, 1534, 1535, 1536, 1537, 1538, 1539, 1542, 1544, 1545, 1547, 1548, 1550, 1551, 1552, 1554, 1556, 1559, 1563, 1565, 1567, 1569, 1571, 1573, 1575, 1576, 1578, 1579, 1580, 1581, 1583, 1584, 1585, 1588, 1589, 1590, 1591, 1592, 1593, 1594, 1595, 1596, 1599, 1603, 1605, 1607, 1608, 1609, 1610, 1611, 1613, 1616, 1619, 1620, 1621, 1622, 1623, 1625, 1626, 1627, 1628, 1630, 1631, 1632, 1633, 1635, 1638, 1641, 1642, 1647, 1649, 1651, 1652, 1654, 1655, 1657, 1658, 1664, 1666, 1667, 1674, 1675, 1677, 1679, 1681, 1682, 1685, 1687, 1688, 1691, 1692, 1693, 1694, 1696, 1697, 1699, 1700, 1701, 1702, 1704, 1705, 1706, 1708, 1709, 1711, 1712, 1714, 1715, 1716, 1718, 1719, 1720, 1722, 1724, 1727, 1729, 1730, 1731, 1733, 1734, 1735, 1736, 1737, 1738, 1740, 1741, 1744, 1748, 1750, 1752, 1755, 1758, 1762, 1765, 1766, 1768, 1769, 1771, 1772, 1773, 1774, 1777, 1778, 1779, 1785, 1787, 1788, 1790, 1792, 1793, 1796, 1800, 1801, 1804, 1806, 1814, 1815, 1816, 1818, 1821, 1823, 1826, 1827, 1829, 1830, 1832, 1833, 1834, 1835, 1837, 1838, 1840, 1841, 1843, 1845, 1846, 1847, 1848, 1851, 1852, 1853, 1854, 1858, 1860, 1861, 1862, 1863, 1864, 1867, 1868, 1871, 1873, 1875, 1878, 1879, 1880, 1882, 1883, 1884, 1886, 1887, 1888, 1890, 1891, 1893, 1894, 1895, 1896, 1897, 1898, 1899, 1901, 1905, 1906, 1907, 1908, 1910, 1911, 1915, 1916, 1917, 1920, 1921, 1923, 1924, 1925, 1926, 1928, 1930, 1931, 1934, 1935, 1937, 1938, 1939, 1940, 1941, 1942, 1943, 1945, 1946, 1947, 1948, 1949, 1950, 1951, 1952, 1953, 1954, 1957, 1959, 1960, 1961, 1962, 1963, 1965, 1966, 1968, 1969, 1971, 1973, 1974, 1976, 1977, 1978, 1979, 1983, 1986, 1987, 1988, 1991, 1992, 1997, 1998, 2000, 2002, 2003, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2015, 2018, 2019, 2020, 2023, 2027, 2028, 2031, 2032, 2033, 2035, 2036, 2037, 2038, 2040, 2042, 2045, 2047, 2048, 2049, 2050, 2052, 2054, 2056, 2057, 2058, 2062, 2063, 2066, 2069, 2070, 2071, 2074, 2075, 2076, 2077, 2078, 2079, 2080, 2081, 2082, 2083, 2084, 2085, 2086, 2087, 2088, 2089, 2090, 2093, 2094, 2095, 2096, 2098, 2099, 2100, 2101, 2102, 2103, 2104, 2109, 2111, 2112, 2113, 2115, 2116, 2118, 2120, 2121, 2122, 2123, 2124, 2126, 2128, 2134, 2136, 2140, 2141, 2143, 2144, 2147, 2148, 2149, 2150, 2152, 2153, 2155, 2157, 2158, 2159, 2160, 2161, 2162, 2163, 2165, 2167, 2168, 2169, 2170, 2174, 2175, 2176, 2178, 2179, 2183, 2185, 2186, 2188, 2190, 2192, 2195, 2197, 2199, 2200, 2201, 2202, 2203, 2204, 2205, 2206, 2207, 2208, 2209, 2210, 2211, 2212, 2213, 2214, 2215, 2216, 2217, 2220, 2225, 2227, 2228, 2230, 2231, 2233, 2234, 2236, 2238, 2240, 2241, 2247, 2248, 2249, 2250, 2251, 2253, 2256, 2257, 2258, 2260, 2263, 2264, 2265, 2269, 2270, 2271, 2275, 2276, 2277, 2278, 2280, 2281, 2283, 2286, 2287, 2289, 2290, 2292, 2295, 2296, 2297, 2298, 2300, 2301, 2302, 2303, 2307, 2310, 2312, 2313, 2314, 2317, 2318, 2319, 2320, 2321, 2322, 2323, 2324, 2327, 2331, 2332, 2333, 2334, 2335, 2338, 2339, 2340, 2342, 2343, 2344, 2345, 2347, 2349, 2350, 2352, 2355, 2356, 2360, 2361, 2364, 2366, 2367, 2368, 2370, 2372, 2374, 2375, 2377, 2379, 2381, 2386, 2388, 2392, 2394, 2397, 2398, 2400, 2404, 2406, 2407, 2408, 2411, 2413, 2416, 2417, 2418, 2420, 2422, 2424, 2428, 2429, 2432, 2434, 2435, 2436, 2437, 2438, 2440, 2441, 2442, 2444, 2445, 2454, 2456, 2457, 2458, 2459, 2460, 2461, 2464, 2465, 2468, 2473, 2474, 2477, 2479, 2480, 2481, 2482, 2483, 2484, 2485, 2489, 2491, 2493, 2495, 2497, 2499, 2500, 2504, 2505, 2507, 2508, 2509, 2510, 2514, 2516, 2517, 2520, 2521, 2522, 2524, 2526, 2527, 2529, 2534, 2535, 2536, 2538, 2542, 2543, 2544, 2547, 2549, 2550, 2552, 2553, 2555, 2556, 2558, 2559, 2560, 2561, 2563, 2564, 2565, 2566, 2567, 2568, 2570, 2571, 2572, 2574, 2577, 2580, 2582, 2583, 2584, 2585, 2587, 2588, 2591, 2594, 2599, 2601, 2604, 2605, 2606, 2610, 2611, 2612, 2613, 2614, 2615, 2616, 2617, 2618, 2620, 2622, 2624, 2628, 2629, 2630, 2633, 2634, 2635, 2636, 2638, 2639, 2640, 2641, 2642, 2644, 2645, 2648, 2650, 2652, 2654, 2655, 2656, 2659, 2661, 2664, 2665, 2667, 2668, 2669, 2670, 2673, 2674, 2676, 2677, 2678, 2679, 2680, 2681, 2684, 2685, 2687, 2688, 2689, 2691, 2693, 2695, 2696, 2697, 2698, 2699, 2702, 2704, 2705, 2706, 2707, 2708, 2713, 2714, 2717, 2719, 2720, 2722, 2723, 2724, 2725, 2726, 2727, 2729, 2730, 2738, 2745, 2746, 2747, 2748, 2749, 2750, 2752, 2754, 2755, 2756, 2757, 2759, 2760, 2761, 2765, 2766, 2768, 2772, 2773, 2774, 2776, 2777, 2778, 2781, 2783, 2784, 2785, 2788, 2790, 2791, 2792, 2796, 2798, 2803, 2805, 2809, 2810, 2811, 2812, 2813, 2816, 2818, 2821, 2824, 2825, 2827, 2828, 2829, 2833, 2836, 2837, 2838, 2839, 2842, 2843, 2844, 2846, 2848, 2849, 2850, 2851, 2852, 2856, 2858, 2859, 2861, 2863, 2864, 2865, 2866, 2867, 2869, 2874, 2875, 2877, 2879, 2880, 2881, 2883, 2887, 2890, 2891, 2893, 2894, 2896, 2898, 2900, 2901, 2902, 2905, 2908, 2912, 2914, 2916, 2919, 2921, 2923, 2924, 2927, 2928, 2929, 2930, 2932, 2935, 2938, 2940, 2945, 2946, 2947, 2948, 2949, 2950, 2952, 2953, 2954, 2956, 2957, 2958, 2959, 2961, 2962, 2964, 2965, 2968, 2970, 2971, 2972, 2975, 2979, 2984, 2985, 2986, 2988, 2991, 2992, 2993, 2995, 2997, 2998, 2999, 3004, 3005, 3006, 3007, 3008, 3010, 3012, 3013, 3015, 3016, 3017, 3024, 3026, 3027, 3029, 3031, 3033, 3034, 3035, 3036, 3038, 3040, 3041, 3043, 3044, 3045, 3046, 3047, 3048, 3051, 3052, 3053, 3055, 3056, 3058, 3059, 3061, 3062, 3065, 3066, 3067, 3068, 3071, 3072, 3076, 3081, 3082, 3084, 3086, 3088, 3090, 3091, 3093, 3095, 3097, 3098, 3099, 3102, 3106, 3107, 3108, 3109, 3111, 3119, 3121, 3124, 3126, 3127, 3128, 3130, 3133, 3137, 3140, 3144, 3145, 3146, 3149, 3152, 3154, 3155, 3156, 3157, 3161, 3162, 3163, 3164, 3165, 3167, 3168, 3169, 3170, 3172, 3173, 3174, 3175, 3176, 3178, 3179, 3180, 3181, 3184, 3185, 3186, 3187, 3189, 3190, 3191, 3193, 3195, 3196, 3198, 3199, 3201, 3202, 3203, 3211, 3213, 3214, 3215, 3222, 3223, 3224, 3227, 3228, 3229, 3230, 3231, 3232, 3233, 3234, 3235, 3239, 3240, 3241, 3242, 3249, 3250, 3251, 3252, 3253, 3255, 3256, 3257, 3259, 3260, 3263, 3265, 3268, 3269, 3270, 3271, 3275, 3276, 3279, 3281, 3282, 3284, 3286, 3291, 3292, 3294, 3296, 3298, 3301, 3302, 3303, 3304, 3308, 3311, 3312, 3313, 3314, 3321, 3322, 3323, 3324, 3325, 3326, 3327, 3329, 3330, 3333, 3334, 3337, 3338, 3340, 3342, 3345, 3346, 3349, 3350, 3351, 3354, 3357, 3358, 3359, 3360, 3361, 3363, 3364, 3365, 3366, 3367, 3368, 3369, 3370, 3373, 3374, 3375, 3376, 3377, 3378, 3379, 3380, 3384, 3385, 3388, 3389, 3390, 3391, 3393, 3394, 3395, 3397, 3401, 3402, 3404, 3405, 3410, 3411, 3412, 3414, 3415, 3417, 3419, 3422, 3423, 3427, 3428, 3429, 3430, 3431, 3432, 3433, 3436, 3437, 3440, 3441, 3443, 3444, 3447, 3448, 3450, 3452, 3454, 3455, 3457, 3459, 3460, 3461, 3462, 3463, 3464, 3466, 3467, 3469, 3470, 3471, 3474, 3475, 3477, 3478, 3479, 3481, 3485, 3486, 3487, 3488, 3489, 3490, 3493, 3494, 3495, 3496, 3497, 3498, 3499, 3500, 3501, 3502, 3503, 3507, 3508, 3509, 3510, 3511, 3512, 3513, 3514, 3515, 3520, 3522, 3524, 3525, 3526, 3527, 3528, 3529, 3530, 3533, 3534, 3535, 3536, 3537, 3539, 3540, 3542, 3544, 3545, 3548, 3550, 3551, 3554, 3555, 3556, 3557, 3558, 3559, 3561, 3562, 3564, 3565, 3567, 3568, 3570, 3571, 3573, 3574, 3575, 3577, 3578, 3580, 3582, 3584, 3585, 3588, 3589, 3590, 3594, 3595, 3596, 3597, 3598, 3599, 3600, 3601, 3602, 3603, 3604, 3605, 3606, 3609, 3610, 3611, 3613, 3614, 3615, 3617, 3619, 3621, 3622, 3623, 3624, 3625, 3626, 3627, 3630, 3633, 3635, 3637, 3639, 3642, 3643, 3647, 3650, 3651, 3652, 3653, 3657, 3658, 3661, 3663, 3664, 3666, 3667, 3671, 3672, 3684, 3688, 3691, 3692, 3693, 3694, 3695, 3697, 3699, 3700, 3702, 3705, 3708, 3711, 3716, 3717, 3718, 3719, 3726, 3728, 3729, 3730, 3733, 3735, 3736, 3738, 3739, 3741, 3743, 3745, 3746, 3748, 3749, 3752, 3753, 3755, 3757, 3758, 3761, 3762, 3763, 3765, 3766, 3767, 3771, 3772, 3773, 3778, 3781, 3783, 3786, 3787, 3788, 3789, 3792, 3793, 3794, 3796, 3800, 3801, 3802, 3803, 3805, 3806, 3807, 3808, 3810, 3812, 3813, 3814, 3815, 3816, 3820, 3824, 3825, 3827, 3829, 3830, 3832, 3833, 3834, 3836, 3837, 3838, 3841, 3842, 3843, 3845, 3847, 3850, 3851, 3852, 3853, 3855, 3860, 3861, 3862, 3864, 3867, 3868, 3874, 3875, 3877, 3880, 3886, 3887, 3889, 3892, 3893, 3894, 3895, 3897, 3898, 3900, 3902, 3903, 3905, 3906, 3907, 3909, 3910, 3912, 3913, 3914, 3919, 3920, 3921, 3922, 3924, 3925, 3927, 3930, 3932, 3934, 3935, 3936, 3939, 3940, 3941, 3942, 3943, 3945, 3947, 3948, 3951, 3952, 3954, 3956, 3958, 3959, 3961, 3962, 3963, 3964, 3965, 3967, 3968, 3970, 3971, 3972, 3974, 3977, 3979, 3980, 3981, 3982, 3983, 3984, 3986, 3987, 3988, 3991, 3994, 3995, 3996, 3997, 4000, 4001, 4003, 4005, 4006, 4007, 4009, 4011, 4014, 4015, 4016, 4018, 4020, 4023, 4024, 4027, 4028, 4030, 4034, 4035, 4036, 4037, 4038, 4040, 4042, 4043, 4044, 4045, 4047, 4051, 4052, 4054, 4055, 4058, 4059, 4060, 4062, 4063, 4069, 4070, 4077, 4078, 4080, 4082, 4085, 4087, 4088, 4090, 4091, 4092, 4093, 4095, 4096, 4097, 4098, 4099, 4103, 4104, 4105, 4106, 4107, 4108, 4110, 4113, 4115, 4116, 4117, 4118, 4119, 4120, 4121, 4123, 4127, 4128, 4129, 4131, 4133, 4134, 4135, 4138, 4139, 4142, 4144, 4145, 4146, 4147, 4148, 4149, 4152, 4153, 4155, 4156, 4158, 4159, 4160, 4161, 4164, 4165, 4166, 4167, 4170, 4171, 4172, 4174, 4175, 4176, 4177, 4178, 4179, 4180, 4182, 4183, 4184, 4185, 4187, 4189, 4190, 4192, 4193, 4194, 4195, 4196, 4197, 4198, 4201, 4202, 4203, 4205, 4206, 4210, 4211, 4212, 4214, 4215, 4216, 4217, 4218, 4220, 4221, 4222, 4223, 4226, 4228, 4232, 4233, 4236, 4237, 4238, 4239, 4240, 4241, 4243, 4244, 4245, 4248, 4249, 4251, 4252, 4254, 4255, 4257, 4258, 4259, 4261, 4264, 4266, 4267, 4269, 4270, 4271, 4272, 4273, 4275, 4276, 4277, 4282, 4284, 4285, 4286, 4288, 4289, 4292, 4293, 4295, 4296, 4299, 4300, 4301, 4302, 4303, 4307, 4309, 4310, 4312, 4313, 4316, 4319, 4320, 4325, 4327, 4329, 4330, 4332, 4337, 4339, 4340, 4342, 4345, 4348, 4349, 4350, 4351, 4352, 4353, 4356, 4357, 4358, 4359, 4361, 4364, 4365, 4366, 4369, 4371, 4373, 4376, 4377, 4378, 4380, 4381, 4384, 4385, 4386, 4388, 4390, 4394, 4400, 4401, 4402, 4404, 4405, 4407, 4408, 4409, 4411, 4412, 4413, 4414, 4415, 4417, 4418, 4420, 4422, 4423, 4425, 4426, 4427, 4429, 4432, 4434, 4435, 4436, 4437, 4438, 4440, 4441, 4443, 4444, 4445, 4446, 4447, 4449, 4450, 4451, 4454, 4455, 4457, 4460, 4462, 4463, 4466, 4467, 4468, 4469, 4470, 4472, 4473, 4474, 4476, 4477, 4479, 4480, 4481, 4482, 4483, 4484, 4487, 4488, 4489, 4490, 4492, 4494, 4495, 4496, 4501, 4502, 4503, 4506, 4509, 4511, 4512, 4514, 4518, 4519, 4520, 4521, 4523, 4524, 4527, 4529, 4530, 4531, 4533, 4534, 4536, 4538, 4540, 4541, 4542, 4543, 4544, 4546, 4547, 4549, 4553, 4556, 4558, 4559, 4560, 4562, 4563, 4564, 4565, 4566, 4568, 4571, 4572, 4573, 4575, 4576, 4577, 4579, 4581, 4582, 4584, 4585, 4586, 4587, 4588, 4590, 4591, 4593, 4594, 4596, 4598, 4600, 4607, 4609, 4610, 4612, 4614, 4616, 4617, 4619, 4620, 4624, 4625, 4628, 4629, 4630, 4631, 4632, 4637, 4640, 4643, 4644, 4645, 4646, 4647, 4648, 4649, 4651, 4652, 4653, 4655, 4657, 4658, 4659, 4660, 4661, 4667, 4668, 4671, 4672, 4673, 4675, 4677, 4678, 4679, 4681, 4683, 4684, 4685, 4686, 4688, 4689, 4690, 4691, 4693, 4695, 4697, 4700, 4701, 4702, 4704, 4707, 4708, 4710, 4713, 4715, 4716, 4720, 4725, 4726, 4728, 4731, 4733, 4734, 4736, 4737, 4738, 4741, 4742, 4743, 4744, 4745, 4747, 4748, 4750, 4751, 4752, 4753, 4754, 4755, 4756, 4757, 4759, 4760, 4761, 4763, 4765, 4766, 4767, 4768, 4770, 4771, 4773, 4777, 4780, 4781, 4782, 4784, 4786, 4787, 4788, 4789, 4790, 4792, 4794, 4796, 4799, 4800, 4801, 4802, 4803, 4806, 4808, 4810, 4812, 4814, 4815, 4820, 4821, 4826, 4827, 4828, 4829, 4831, 4832, 4833, 4835, 4838, 4840, 4842, 4843, 4845, 4846, 4848, 4849, 4850, 4853, 4854, 4856, 4857, 4858, 4861, 4863, 4865, 4866, 4867, 4872, 4874, 4879, 4880, 4886, 4887, 4888, 4891, 4893, 4895, 4896, 4897, 4898, 4900, 4905, 4906, 4907, 4908, 4910, 4911, 4913, 4914, 4915, 4916, 4918, 4919, 4922, 4923, 4926, 4927, 4928, 4929, 4934, 4935, 4938, 4939, 4941, 4943, 4946, 4948, 4950, 4952, 4953, 4955, 4956, 4957, 4958, 4961, 4962, 4963, 4964, 4966, 4967, 4968, 4969, 4970, 4972, 4973, 4976, 4977, 4978, 4979, 4980, 4982, 4983, 4985, 4986, 4988, 4989, 4991, 4993, 4994, 4995, 4996, 4997, 5004, 5006, 5007, 5009, 5010, 5011, 5013, 5016, 5017, 5018, 5020, 5023, 5024, 5025, 5029, 5032, 5034, 5035, 5037, 5042, 5043, 5044, 5045, 5047, 5050, 5052, 5053, 5054, 5057, 5058, 5059, 5063, 5064, 5065, 5067, 5069, 5074, 5075, 5076, 5078, 5079, 5084, 5085, 5086, 5087, 5089, 5090, 5092, 5093, 5097, 5100, 5101, 5103, 5104, 5108, 5110, 5111, 5112, 5113, 5114, 5115, 5116, 5118, 5120, 5123, 5124, 5126, 5127, 5128, 5130, 5131, 5132, 5133, 5134, 5137, 5138, 5139, 5141, 5142, 5143, 5148, 5149, 5150, 5151, 5154, 5156, 5157, 5161, 5162, 5164, 5166, 5167, 5168, 5170, 5171, 5172, 5174, 5175, 5178, 5179, 5182, 5184, 5185, 5188, 5193, 5194, 5195, 5196, 5198, 5199, 5200, 5201, 5202, 5204, 5208, 5210, 5211, 5214, 5217, 5218, 5219, 5220, 5221, 5222, 5223, 5224, 5225, 5227, 5228, 5230, 5234, 5237, 5241, 5244, 5245, 5246, 5247, 5254, 5256, 5257, 5258, 5259, 5260, 5261, 5262, 5263, 5264, 5265, 5267, 5268, 5270, 5271, 5275, 5276, 5278, 5280, 5281, 5282, 5283, 5287, 5289, 5290, 5291, 5292, 5295, 5296, 5298, 5301, 5302, 5304, 5305, 5309, 5310, 5315, 5318, 5319, 5320, 5321, 5322, 5323, 5324, 5325, 5326, 5328, 5329, 5331, 5333, 5334, 5336, 5337, 5338, 5339, 5346, 5353, 5354, 5355, 5357, 5358, 5359, 5360, 5361, 5367, 5371, 5373, 5377, 5379, 5380, 5381, 5384, 5385, 5386, 5387, 5389, 5390, 5391, 5392, 5393, 5394, 5395, 5400, 5401, 5402, 5404, 5406, 5409, 5410, 5413, 5414, 5415, 5418, 5419, 5420, 5421, 5422, 5425, 5429, 5430, 5431, 5432, 5435, 5436, 5437, 5438, 5441, 5442, 5444, 5445, 5446, 5449, 5451, 5452, 5453, 5456, 5458, 5459, 5460, 5463, 5464, 5465, 5467, 5469, 5472, 5473, 5474, 5475, 5477, 5478, 5479, 5483, 5484, 5491, 5492, 5498, 5501, 5503, 5504, 5505, 5506, 5507, 5508, 5510, 5511, 5515, 5518, 5519, 5521, 5523, 5524, 5525, 5527, 5530, 5532, 5534, 5535, 5540, 5541, 5543, 5544, 5545, 5546, 5548, 5551, 5552, 5553, 5558, 5559, 5561, 5562, 5565, 5566, 5567, 5568, 5572, 5573, 5575, 5576, 5577, 5578, 5579, 5580, 5582, 5584, 5586, 5587, 5588, 5590, 5591, 5592, 5593, 5594, 5596, 5597, 5599, 5601, 5603, 5606, 5608, 5611, 5612, 5614, 5618, 5619, 5622, 5623, 5625, 5626, 5627, 5629, 5630, 5631, 5632, 5633, 5634, 5635, 5637, 5641, 5644, 5645, 5647, 5649, 5650, 5652, 5653, 5654, 5655, 5656, 5657, 5659, 5660, 5661, 5662, 5663, 5664, 5665, 5670, 5672, 5673, 5675, 5676, 5677, 5679, 5681, 5682, 5685, 5688, 5692, 5693, 5694, 5695, 5696, 5697, 5698, 5700, 5701, 5702, 5703, 5704, 5705, 5706, 5708, 5709, 5711, 5713, 5716, 5718, 5719, 5722, 5724, 5725, 5726, 5727, 5729, 5730, 5731, 5732, 5733, 5736, 5738, 5739, 5741, 5743, 5744, 5745, 5746, 5748, 5750, 5751, 5752, 5753, 5756, 5757, 5762, 5765, 5766, 5768, 5773, 5774, 5775, 5784, 5787, 5788, 5790, 5791, 5793, 5794, 5795, 5797, 5798, 5799, 5802, 5803, 5804, 5809, 5811, 5812, 5813, 5814, 5815, 5817, 5821, 5822, 5823, 5825, 5827, 5828, 5829, 5830, 5833, 5835, 5836, 5837, 5838, 5840, 5841, 5844, 5846, 5847, 5848, 5849, 5850, 5851, 5852, 5853, 5854, 5855, 5860, 5862, 5863, 5864, 5865, 5866, 5867, 5870, 5872, 5873, 5874, 5876, 5878, 5882, 5886, 5888, 5890, 5891, 5893, 5894, 5895, 5896, 5899, 5900, 5903, 5905, 5906, 5908, 5913, 5915, 5916, 5917, 5920, 5925, 5927, 5928, 5929, 5931, 5932, 5933, 5934, 5935, 5936, 5937, 5938, 5941, 5945, 5946, 5948, 5949, 5950, 5951, 5953, 5955, 5959, 5961, 5962, 5964, 5965, 5966, 5967, 5968, 5972, 5973, 5974, 5975, 5976, 5978, 5980, 5982, 5983, 5984, 5985, 5987, 5992, 5994, 5995, 5997, 5998, 5999, 6002, 6005, 6006, 6008, 6009, 6010, 6013, 6014, 6015, 6017, 6019, 6020, 6021, 6023, 6024, 6026, 6027, 6028, 6030, 6031, 6032, 6033, 6034, 6036, 6038, 6041, 6042, 6043, 6047, 6051, 6052, 6053, 6054, 6055, 6056, 6058, 6060, 6061, 6064, 6067, 6068, 6069, 6071, 6072, 6073, 6075, 6076, 6077, 6079, 6080, 6081, 6082, 6084, 6087, 6089, 6090, 6093, 6095, 6097, 6098, 6100, 6101, 6102, 6103, 6106, 6107, 6109, 6111, 6112, 6113, 6114, 6115, 6116, 6118, 6119, 6120, 6121, 6122, 6124, 6125, 6126, 6127, 6129, 6131, 6136, 6139, 6141, 6142, 6143, 6147, 6148, 6150, 6152, 6154, 6155, 6156, 6157, 6159, 6160, 6161, 6162, 6164, 6167, 6170, 6171, 6173, 6174, 6176, 6179, 6180, 6183, 6184, 6185, 6187, 6191, 6192, 6193, 6194, 6195, 6196, 6198, 6199, 6200, 6201, 6203, 6206, 6208, 6210, 6211, 6212, 6213, 6217, 6218, 6221, 6222, 6224, 6226, 6228, 6230, 6231, 6232, 6233, 6234, 6235, 6236, 6237, 6238, 6242, 6243, 6245, 6246, 6248, 6249, 6250, 6251, 6253, 6255, 6257, 6258, 6259, 6261, 6265, 6268, 6271, 6272, 6274, 6275, 6277, 6279, 6281, 6282, 6284, 6288, 6289, 6291, 6292, 6296, 6297, 6299, 6302, 6307, 6308, 6309, 6310, 6311, 6312, 6314, 6317, 6318, 6320, 6322, 6324, 6326, 6327, 6328, 6330, 6331, 6332, 6334, 6335, 6336, 6337, 6338, 6341, 6346, 6347, 6349, 6352, 6353, 6357, 6358, 6361, 6364, 6365, 6366, 6367, 6368, 6373, 6375, 6376, 6377, 6378, 6379, 6380, 6381, 6383, 6384, 6385, 6386, 6387, 6388, 6389, 6390, 6392, 6396, 6397, 6398, 6399, 6400, 6402, 6403, 6404, 6406, 6407, 6411, 6412, 6413, 6414, 6415, 6416, 6422, 6423, 6424, 6425, 6427, 6430, 6431, 6432, 6433, 6434, 6435, 6436, 6437, 6438, 6439, 6442, 6443, 6444, 6446, 6449, 6450, 6454, 6458, 6460, 6461, 6464, 6465, 6468, 6470, 6471, 6473, 6474, 6475, 6477, 6481, 6482, 6483, 6484, 6488, 6490, 6491, 6493, 6495, 6497, 6498, 6499, 6501, 6503, 6504, 6505, 6508, 6509, 6511, 6513, 6514, 6515, 6518, 6519, 6522, 6523, 6526, 6527, 6528, 6530, 6532, 6533, 6535, 6536, 6537, 6538, 6539, 6540, 6541, 6542, 6543, 6544, 6547, 6548, 6550, 6551, 6553, 6554, 6555, 6557, 6558, 6559, 6561, 6562, 6565, 6566, 6567, 6568, 6571, 6573, 6577, 6578, 6579, 6581, 6582, 6586, 6587, 6588]]
Label class of each point :
0 Stayed
1 Stayed
2 Churned
3 Churned
4 Churned
...
7037 Churned
7038 Stayed
7039 Churned
7041 Stayed
7042 Stayed
Name: Customer Status, Length: 6589, dtype: category
Categories (2, object): ['Churned', 'Stayed']
Index of the best medoids : [1260, 4154, 3100, 2830, 6199]
Clustering Visualizations:
DBSCAN Clustering:
# Applying PCA for dimensionality reduction
pca = PCA(n_components=2)
data_pca = pca.fit_transform(data)
# Creating a scatter plot for DBSCAN clustering results
plt.figure(figsize=(8, 6))
plt.scatter(data_pca[:, 0], data_pca[:, 1], c=dbscan_labels, cmap='rainbow', s=50)
plt.title("DBSCAN Clustering")
plt.xlabel("Principal Component 1")
plt.ylabel("Principal Component 2")
plt.colorbar(label="Cluster Labels")
plt.show()
KMeans Clustering:
# Applying PCA for dimensionality reduction
pca = PCA(n_components=2)
data_pca = pca.fit_transform(data)
# Creating a scatter plot for KMeans clustering results
plt.figure(figsize=(8, 6))
plt.scatter(data_pca[:, 0], data_pca[:, 1], c=kmeans_labels, cmap='rainbow', s=50)
plt.title("KMeans Clustering")
plt.xlabel("Principal Component 1")
plt.ylabel("Principal Component 2")
plt.colorbar(label="Cluster Labels")
plt.show()
CLARNS Clustering:
# Retrieving the CLARANS cluster assignments
clarans_clusters = clarans_instance.get_clusters()
# Creating a scatter plot for visualizing the clusters
plt.figure(figsize=(6, 4))
for cluster_idx, cluster in enumerate(clarans_clusters):
cluster_points = data[cluster]
plt.scatter(cluster_points[:, 0], cluster_points[:, 1], label=f'Cluster {cluster_idx + 1}')
plt.xlabel('Feature 1')
plt.ylabel('Feature 2')
plt.title('CLARANS Clustering Results')
plt.legend()
plt.show()
Neural Networks Modeling:
#Gathering CLARANS Clustering labels for modeling data
num_clusters = len(np.unique(np.concatenate(clarans_clusters)))
cluster_feature = np.zeros((len(data), num_clusters))
for cluster_idx, cluster in enumerate(clarans_clusters):
cluster_feature[cluster, cluster_idx] = 1
data_with_cluster = np.hstack((data, cluster_feature))
X = data_with_cluster
y = df_y
# Splitting data into training and testing sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
df_X_test = pd.DataFrame(X_test)
X_test_indices = df_X_test.index
customerID = originalData.loc[X_test_indices, 'Customer ID']
# Encoding target labels
label_encoder = LabelEncoder()
y_train_encoded = label_encoder.fit_transform(y_train)
y_test_encoded = label_encoder.transform(y_test)
num_classes = len(label_encoder.classes_)
#Reshaping the data to be compatible with neural network models
X_train = X_train.reshape(X_train.shape[0], X_train.shape[1], 1)
X_test = X_test.reshape(X_test.shape[0], X_test.shape[1], 1)
Defining Activation Functions:
# SWISH activation function Definition
def swish(x):
return x * tf.sigmoid(x)
# MISH activation function Definition
def mish(x):
return x * tf.math.tanh(tf.math.softplus(x))
# APTx activation function Definition
def aptx(x):
return x * tf.math.tanh(tf.math.softplus(x) + 1)
Feed Forward Neural Network Definition:
def build_ffnn_model(activation_function):
model = tf.keras.Sequential([
tf.keras.layers.Dense(64, activation=activation_function, input_shape=(X_train.shape[1],)),
tf.keras.layers.Dense(32, activation=activation_function),
tf.keras.layers.Dense(num_classes, activation='softmax')
])
return model
Convolutional Neural Network Defintion:
def build_cnn_model(activation_function):
model = tf.keras.Sequential([
tf.keras.layers.Conv1D(16, 3, activation=activation_function, padding='same', input_shape=(X_train.shape[1], 1)),
tf.keras.layers.MaxPooling1D(2),
tf.keras.layers.Conv1D(32, 3, activation=activation_function, padding='same'),
tf.keras.layers.MaxPooling1D(2),
tf.keras.layers.Flatten(),
tf.keras.layers.Dense(128, activation=activation_function),
tf.keras.layers.Dense(10, activation='softmax')
])
return model
Function to Compile, Train and Evaluate Neural Network Models:
def compileandtrainmodel(model_name, model_type, epochs, batch_size):
print(f"Building Model: {model_name}")
model_type.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
model_type.fit(X_train, y_train_encoded, validation_data=(X_test, y_test_encoded), epochs=epochs, batch_size=batch_size)
loss, accuracy = model_type.evaluate(X_test, y_test_encoded)
print(f"Model Name: {model_name} ==> Test Loss: {loss:.4f}, Test Accuracy: {accuracy:.4f}")
Function to calculate metrics like Confusion Matrix, Precision, Recall, F1 Score:
def metricscalculation(modelname, model, X_val, y_val):
# Predicted labels for the model
model_predictions = model.predict(X_val)
model_preditcion_classes = np.argmax(model_predictions, axis=1)
# Confusion matrix for the model
model_confusion_matrix = confusion_matrix(y_val, model_preditcion_classes)
print(f"{modelname} Confusion Matrix:")
print(model_confusion_matrix)
# Extracting values from Confusion Matrix
TP = model_confusion_matrix[1, 1]
FP = model_confusion_matrix[0, 1]
TN = model_confusion_matrix[0, 0]
FN = model_confusion_matrix[1, 0]
# Calculation various metrics for the model
model_precision = precision_score(y_val, model_preditcion_classes)
model_recall = recall_score(y_val, model_preditcion_classes)
model_f1 = f1_score(y_val, model_preditcion_classes)
sensitivity = TP / (TP + FN)
specificity = TN / (TN + FP)
print("True Positives:", TP)
print("False Positives:", FP)
print("True Negatives:", TN)
print("False Negatives:", FN)
modelmetrics_df = pd.DataFrame({
"Metric": ["Precision", "Recall", "F1 Score", "Sensitivity", "Specificity"],
modelname: [model_precision, model_recall, model_f1, sensitivity, specificity]
})
# Print the metrics table
print(modelmetrics_df)
import plotly.graph_objects as go
import pandas as pd
def create_table_with_formatting(actual_values, predicted_values, customer_ids):
# Create DataFrame
data = {'Customer ID': customer_ids, 'True Values': actual_values, 'Predicted Values': predicted_values}
df = pd.DataFrame(data)
df.insert(0, 'S.No', range(1, 1 + len(df)))
# Map 1 to 'Not Churn' and 0 to 'Churn' in 'True Values' and 'Predicted Values' columns
df['True Values'] = df['True Values'].map({1: 'Not Churn', 0: 'Churn'})
df['Predicted Values'] = df['Predicted Values'].map({1: 'Not Churn', 0: 'Churn'})
# Add a correctness column
df['Correct Prediction'] = df['True Values'] == df['Predicted Values']
# Create a table with Plotly Graph Objects
fig = go.Figure(data=[go.Table(
header=dict(values=df.columns),
cells=dict(values=df.transpose().values),
)])
# Conditional formatting for false predictions
color_map = {True: 'palegreen', False: 'lightsalmon'}
cell_colors = [[color_map[correct] for correct in df['Correct Prediction']]]
# Update table layout with conditional formatting
fig.update_traces(cells=dict(fill=dict(color=cell_colors)))
# Customize layout
fig.update_layout(
title_text='Table: Predictions vs True Values',
autosize=True,
width=700, # Total width of the table
margin=dict(l=0, r=0, b=0, t=40), # Adjust margins as needed
)
return fig
Evaluation of Feed Forward Neural Networks with Various Activation Functions:
Feed Forward Neural Network with ReLU Activation Function
model_name = "Feed Forward Neural Network with ReLU Activation Function"
ffnn_model_relu = build_ffnn_model('relu')
compileandtrainmodel(model_name, ffnn_model_relu, 10, 40)
WARNING:tensorflow:From C:\Anaconda\Lib\site-packages\keras\src\backend.py:873: The name tf.get_default_graph is deprecated. Please use tf.compat.v1.get_default_graph instead. Building Model: Feed Forward Neural Network with ReLU Activation Function WARNING:tensorflow:From C:\Anaconda\Lib\site-packages\keras\src\optimizers\__init__.py:309: The name tf.train.Optimizer is deprecated. Please use tf.compat.v1.train.Optimizer instead. Epoch 1/10 WARNING:tensorflow:From C:\Anaconda\Lib\site-packages\keras\src\utils\tf_utils.py:492: The name tf.ragged.RaggedTensorValue is deprecated. Please use tf.compat.v1.ragged.RaggedTensorValue instead. WARNING:tensorflow:From C:\Anaconda\Lib\site-packages\keras\src\engine\base_layer_utils.py:384: The name tf.executing_eagerly_outside_functions is deprecated. Please use tf.compat.v1.executing_eagerly_outside_functions instead. 132/132 [==============================] - 3s 9ms/step - loss: 0.4393 - accuracy: 0.7962 - val_loss: 0.3823 - val_accuracy: 0.8118 Epoch 2/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3720 - accuracy: 0.8275 - val_loss: 0.3545 - val_accuracy: 0.8316 Epoch 3/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3634 - accuracy: 0.8291 - val_loss: 0.3569 - val_accuracy: 0.8300 Epoch 4/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3585 - accuracy: 0.8349 - val_loss: 0.3561 - val_accuracy: 0.8323 Epoch 5/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3533 - accuracy: 0.8376 - val_loss: 0.3468 - val_accuracy: 0.8392 Epoch 6/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3497 - accuracy: 0.8412 - val_loss: 0.3440 - val_accuracy: 0.8384 Epoch 7/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3472 - accuracy: 0.8387 - val_loss: 0.3448 - val_accuracy: 0.8429 Epoch 8/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3473 - accuracy: 0.8427 - val_loss: 0.3449 - val_accuracy: 0.8361 Epoch 9/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3396 - accuracy: 0.8433 - val_loss: 0.3470 - val_accuracy: 0.8293 Epoch 10/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3427 - accuracy: 0.8433 - val_loss: 0.3418 - val_accuracy: 0.8361 42/42 [==============================] - 0s 3ms/step - loss: 0.3418 - accuracy: 0.8361 Model Name: Feed Forward Neural Network with ReLU Activation Function ==> Test Loss: 0.3418, Test Accuracy: 0.8361
metricscalculation(model_name, ffnn_model_relu, X_test, y_test_encoded)
42/42 [==============================] - 0s 3ms/step
Feed Forward Neural Network with ReLU Activation Function Confusion Matrix:
[[225 158]
[ 58 877]]
True Positives: 877
False Positives: 158
True Negatives: 225
False Negatives: 58
Metric Feed Forward Neural Network with ReLU Activation Function
0 Precision 0.847343
1 Recall 0.937968
2 F1 Score 0.890355
3 Sensitivity 0.937968
4 Specificity 0.587467
## FFNN ReLU
fig = create_table_with_formatting(y_test_encoded, np.argmax(ffnn_model_relu.predict(X_test), axis=1), customerID)
fig.show()
42/42 [==============================] - 0s 4ms/step
Feed Forward Neural Network with SWISH Activation Function
model_name = "Feed Forward Neural Network with SWISH Activation Function"
ffnn_model_swish = build_ffnn_model(swish)
compileandtrainmodel(model_name, ffnn_model_swish, 10, 40)
Building Model: Feed Forward Neural Network with SWISH Activation Function Epoch 1/10 132/132 [==============================] - 2s 8ms/step - loss: 0.4442 - accuracy: 0.7868 - val_loss: 0.3877 - val_accuracy: 0.8171 Epoch 2/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3791 - accuracy: 0.8219 - val_loss: 0.3711 - val_accuracy: 0.8300 Epoch 3/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3720 - accuracy: 0.8275 - val_loss: 0.3626 - val_accuracy: 0.8308 Epoch 4/10 132/132 [==============================] - 1s 7ms/step - loss: 0.3684 - accuracy: 0.8262 - val_loss: 0.3684 - val_accuracy: 0.8308 Epoch 5/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3678 - accuracy: 0.8283 - val_loss: 0.3607 - val_accuracy: 0.8293 Epoch 6/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3656 - accuracy: 0.8285 - val_loss: 0.3583 - val_accuracy: 0.8338 Epoch 7/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3653 - accuracy: 0.8312 - val_loss: 0.3599 - val_accuracy: 0.8247 Epoch 8/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3637 - accuracy: 0.8321 - val_loss: 0.3601 - val_accuracy: 0.8308 Epoch 9/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3641 - accuracy: 0.8327 - val_loss: 0.3594 - val_accuracy: 0.8285 Epoch 10/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3604 - accuracy: 0.8313 - val_loss: 0.3544 - val_accuracy: 0.8331 42/42 [==============================] - 0s 3ms/step - loss: 0.3544 - accuracy: 0.8331 Model Name: Feed Forward Neural Network with SWISH Activation Function ==> Test Loss: 0.3544, Test Accuracy: 0.8331
metricscalculation(model_name, ffnn_model_swish, X_test, y_test_encoded)
42/42 [==============================] - 0s 3ms/step
Feed Forward Neural Network with SWISH Activation Function Confusion Matrix:
[[248 135]
[ 85 850]]
True Positives: 850
False Positives: 135
True Negatives: 248
False Negatives: 85
Metric Feed Forward Neural Network with SWISH Activation Function
0 Precision 0.862944
1 Recall 0.909091
2 F1 Score 0.885417
3 Sensitivity 0.909091
4 Specificity 0.647520
## FFNN SWISH
fig = create_table_with_formatting(y_test_encoded, np.argmax(ffnn_model_swish.predict(X_test), axis=1), customerID)
fig.show()
42/42 [==============================] - 0s 3ms/step
Feed Forward Neural Network with MISH Activation Function
model_name = "Feed Forward Neural Network with MISH Activation Function"
ffnn_model_mish = build_ffnn_model(mish)
compileandtrainmodel(model_name, ffnn_model_mish, 10, 40)
Building Model: Feed Forward Neural Network with MISH Activation Function Epoch 1/10 132/132 [==============================] - 2s 8ms/step - loss: 0.4290 - accuracy: 0.8019 - val_loss: 0.3871 - val_accuracy: 0.8179 Epoch 2/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3797 - accuracy: 0.8228 - val_loss: 0.3692 - val_accuracy: 0.8270 Epoch 3/10 132/132 [==============================] - 1s 7ms/step - loss: 0.3722 - accuracy: 0.8272 - val_loss: 0.3636 - val_accuracy: 0.8285 Epoch 4/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3687 - accuracy: 0.8268 - val_loss: 0.3603 - val_accuracy: 0.8278 Epoch 5/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3689 - accuracy: 0.8285 - val_loss: 0.3585 - val_accuracy: 0.8331 Epoch 6/10 132/132 [==============================] - 1s 7ms/step - loss: 0.3662 - accuracy: 0.8304 - val_loss: 0.3582 - val_accuracy: 0.8285 Epoch 7/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3660 - accuracy: 0.8329 - val_loss: 0.3587 - val_accuracy: 0.8323 Epoch 8/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3646 - accuracy: 0.8332 - val_loss: 0.3569 - val_accuracy: 0.8323 Epoch 9/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3628 - accuracy: 0.8293 - val_loss: 0.3557 - val_accuracy: 0.8346 Epoch 10/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3611 - accuracy: 0.8321 - val_loss: 0.3616 - val_accuracy: 0.8255 42/42 [==============================] - 0s 3ms/step - loss: 0.3616 - accuracy: 0.8255 Model Name: Feed Forward Neural Network with MISH Activation Function ==> Test Loss: 0.3616, Test Accuracy: 0.8255
metricscalculation(model_name, ffnn_model_mish, X_test, y_test_encoded)
42/42 [==============================] - 0s 3ms/step
Feed Forward Neural Network with MISH Activation Function Confusion Matrix:
[[212 171]
[ 59 876]]
True Positives: 876
False Positives: 171
True Negatives: 212
False Negatives: 59
Metric Feed Forward Neural Network with MISH Activation Function
0 Precision 0.836676
1 Recall 0.936898
2 F1 Score 0.883956
3 Sensitivity 0.936898
4 Specificity 0.553525
## FFNN MISH
fig = create_table_with_formatting(y_test_encoded, np.argmax(ffnn_model_mish.predict(X_test), axis=1), customerID)
fig.show()
42/42 [==============================] - 0s 2ms/step
Feed Forward Neural Network with APTx Activation Function
model_name = "Feed Forward Neural Network with APTx Activation Function"
ffnn_model_aptx = build_ffnn_model(aptx)
compileandtrainmodel(model_name, ffnn_model_aptx, 10, 40)
Building Model: Feed Forward Neural Network with APTx Activation Function Epoch 1/10 132/132 [==============================] - 2s 8ms/step - loss: 0.4132 - accuracy: 0.8025 - val_loss: 0.3794 - val_accuracy: 0.8209 Epoch 2/10 132/132 [==============================] - 1s 5ms/step - loss: 0.3769 - accuracy: 0.8215 - val_loss: 0.3692 - val_accuracy: 0.8255 Epoch 3/10 132/132 [==============================] - 1s 5ms/step - loss: 0.3707 - accuracy: 0.8313 - val_loss: 0.3613 - val_accuracy: 0.8293 Epoch 4/10 132/132 [==============================] - 1s 5ms/step - loss: 0.3718 - accuracy: 0.8258 - val_loss: 0.3632 - val_accuracy: 0.8247 Epoch 5/10 132/132 [==============================] - 1s 5ms/step - loss: 0.3701 - accuracy: 0.8317 - val_loss: 0.3611 - val_accuracy: 0.8278 Epoch 6/10 132/132 [==============================] - 1s 6ms/step - loss: 0.3698 - accuracy: 0.8283 - val_loss: 0.3661 - val_accuracy: 0.8263 Epoch 7/10 132/132 [==============================] - 1s 5ms/step - loss: 0.3682 - accuracy: 0.8306 - val_loss: 0.3604 - val_accuracy: 0.8278 Epoch 8/10 132/132 [==============================] - 1s 5ms/step - loss: 0.3687 - accuracy: 0.8308 - val_loss: 0.3597 - val_accuracy: 0.8270 Epoch 9/10 132/132 [==============================] - 1s 5ms/step - loss: 0.3685 - accuracy: 0.8300 - val_loss: 0.3590 - val_accuracy: 0.8308 Epoch 10/10 132/132 [==============================] - 1s 5ms/step - loss: 0.3673 - accuracy: 0.8330 - val_loss: 0.3626 - val_accuracy: 0.8278 42/42 [==============================] - 0s 2ms/step - loss: 0.3626 - accuracy: 0.8278 Model Name: Feed Forward Neural Network with APTx Activation Function ==> Test Loss: 0.3626, Test Accuracy: 0.8278
metricscalculation(model_name, ffnn_model_aptx, X_test, y_test_encoded)
42/42 [==============================] - 0s 2ms/step
Feed Forward Neural Network with APTx Activation Function Confusion Matrix:
[[244 139]
[ 88 847]]
True Positives: 847
False Positives: 139
True Negatives: 244
False Negatives: 88
Metric Feed Forward Neural Network with APTx Activation Function
0 Precision 0.859026
1 Recall 0.905882
2 F1 Score 0.881832
3 Sensitivity 0.905882
4 Specificity 0.637076
## FFNN APTx
fig = create_table_with_formatting(y_test_encoded, np.argmax(ffnn_model_aptx.predict(X_test), axis=1), customerID)
fig.show()
42/42 [==============================] - 0s 4ms/step
Evaluation of Convolutional Neural Networks with Various Activation Functions:
Convolutional Neural Network with ReLU Activation Function:
model_name = "Convolutional Neural Network with ReLU Activation Function"
cnn_model_relu = build_ffnn_model('relu')
compileandtrainmodel(model_name, cnn_model_relu, 10, 32)
Building Model: Convolutional Neural Network with ReLU Activation Function Epoch 1/10 165/165 [==============================] - 2s 8ms/step - loss: 0.4529 - accuracy: 0.8021 - val_loss: 0.3833 - val_accuracy: 0.8209 Epoch 2/10 165/165 [==============================] - 1s 6ms/step - loss: 0.3775 - accuracy: 0.8247 - val_loss: 0.3651 - val_accuracy: 0.8270 Epoch 3/10 165/165 [==============================] - 1s 6ms/step - loss: 0.3663 - accuracy: 0.8293 - val_loss: 0.3524 - val_accuracy: 0.8323 Epoch 4/10 165/165 [==============================] - 1s 6ms/step - loss: 0.3572 - accuracy: 0.8359 - val_loss: 0.3474 - val_accuracy: 0.8293 Epoch 5/10 165/165 [==============================] - 1s 6ms/step - loss: 0.3518 - accuracy: 0.8372 - val_loss: 0.3452 - val_accuracy: 0.8437 Epoch 6/10 165/165 [==============================] - 1s 6ms/step - loss: 0.3481 - accuracy: 0.8420 - val_loss: 0.3392 - val_accuracy: 0.8414 Epoch 7/10 165/165 [==============================] - 1s 6ms/step - loss: 0.3444 - accuracy: 0.8406 - val_loss: 0.3419 - val_accuracy: 0.8331 Epoch 8/10 165/165 [==============================] - 1s 6ms/step - loss: 0.3383 - accuracy: 0.8461 - val_loss: 0.3463 - val_accuracy: 0.8323 Epoch 9/10 165/165 [==============================] - 1s 6ms/step - loss: 0.3375 - accuracy: 0.8448 - val_loss: 0.3367 - val_accuracy: 0.8376 Epoch 10/10 165/165 [==============================] - 1s 6ms/step - loss: 0.3341 - accuracy: 0.8478 - val_loss: 0.3355 - val_accuracy: 0.8437 42/42 [==============================] - 0s 3ms/step - loss: 0.3355 - accuracy: 0.8437 Model Name: Convolutional Neural Network with ReLU Activation Function ==> Test Loss: 0.3355, Test Accuracy: 0.8437
metricscalculation(model_name, cnn_model_relu, X_test, y_test_encoded)
42/42 [==============================] - 0s 3ms/step
Convolutional Neural Network with ReLU Activation Function Confusion Matrix:
[[252 131]
[ 75 860]]
True Positives: 860
False Positives: 131
True Negatives: 252
False Negatives: 75
Metric Convolutional Neural Network with ReLU Activation Function
0 Precision 0.867810
1 Recall 0.919786
2 F1 Score 0.893043
3 Sensitivity 0.919786
4 Specificity 0.657963
## CNN ReLU
fig = create_table_with_formatting(y_test_encoded, np.argmax(cnn_model_relu.predict(X_test), axis=1), customerID)
fig.show()
42/42 [==============================] - 0s 3ms/step
Convolutional Neural Network with SWISH Activation Function:
model_name = "Convolutional Neural Network with SWISH Activation Function"
cnn_model_swish = build_ffnn_model(swish)
compileandtrainmodel(model_name, cnn_model_swish, 10, 32)
Building Model: Convolutional Neural Network with SWISH Activation Function Epoch 1/10 165/165 [==============================] - 2s 7ms/step - loss: 0.4313 - accuracy: 0.8012 - val_loss: 0.3817 - val_accuracy: 0.8164 Epoch 2/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3778 - accuracy: 0.8256 - val_loss: 0.3655 - val_accuracy: 0.8232 Epoch 3/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3716 - accuracy: 0.8304 - val_loss: 0.3665 - val_accuracy: 0.8225 Epoch 4/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3696 - accuracy: 0.8274 - val_loss: 0.3629 - val_accuracy: 0.8240 Epoch 5/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3694 - accuracy: 0.8275 - val_loss: 0.3665 - val_accuracy: 0.8202 Epoch 6/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3658 - accuracy: 0.8287 - val_loss: 0.3592 - val_accuracy: 0.8369 Epoch 7/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3664 - accuracy: 0.8315 - val_loss: 0.3601 - val_accuracy: 0.8354 Epoch 8/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3636 - accuracy: 0.8329 - val_loss: 0.3576 - val_accuracy: 0.8293 Epoch 9/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3624 - accuracy: 0.8327 - val_loss: 0.3569 - val_accuracy: 0.8338 Epoch 10/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3588 - accuracy: 0.8374 - val_loss: 0.3532 - val_accuracy: 0.8361 42/42 [==============================] - 0s 2ms/step - loss: 0.3532 - accuracy: 0.8361 Model Name: Convolutional Neural Network with SWISH Activation Function ==> Test Loss: 0.3532, Test Accuracy: 0.8361
metricscalculation(model_name, cnn_model_swish, X_test, y_test_encoded)
42/42 [==============================] - 0s 3ms/step
Convolutional Neural Network with SWISH Activation Function Confusion Matrix:
[[241 142]
[ 74 861]]
True Positives: 861
False Positives: 142
True Negatives: 241
False Negatives: 74
Metric Convolutional Neural Network with SWISH Activation Function
0 Precision 0.858425
1 Recall 0.920856
2 F1 Score 0.888545
3 Sensitivity 0.920856
4 Specificity 0.629243
## CNN SWISH
fig = create_table_with_formatting(y_test_encoded, np.argmax(cnn_model_swish.predict(X_test), axis=1), customerID)
fig.show()
42/42 [==============================] - 0s 4ms/step
Convolutional Neural Network with MISH Activation Function:
model_name = "Convolutional Neural Network with MISH Activation Function"
cnn_model_mish = build_ffnn_model(mish)
compileandtrainmodel(model_name, cnn_model_mish, 10, 32)
Building Model: Convolutional Neural Network with MISH Activation Function Epoch 1/10 165/165 [==============================] - 2s 6ms/step - loss: 0.4266 - accuracy: 0.7955 - val_loss: 0.3785 - val_accuracy: 0.8217 Epoch 2/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3770 - accuracy: 0.8247 - val_loss: 0.3687 - val_accuracy: 0.8285 Epoch 3/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3709 - accuracy: 0.8238 - val_loss: 0.3626 - val_accuracy: 0.8285 Epoch 4/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3700 - accuracy: 0.8281 - val_loss: 0.3611 - val_accuracy: 0.8278 Epoch 5/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3682 - accuracy: 0.8294 - val_loss: 0.3596 - val_accuracy: 0.8316 Epoch 6/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3660 - accuracy: 0.8293 - val_loss: 0.3637 - val_accuracy: 0.8209 Epoch 7/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3644 - accuracy: 0.8298 - val_loss: 0.3550 - val_accuracy: 0.8293 Epoch 8/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3628 - accuracy: 0.8334 - val_loss: 0.3556 - val_accuracy: 0.8323 Epoch 9/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3612 - accuracy: 0.8321 - val_loss: 0.3557 - val_accuracy: 0.8346 Epoch 10/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3605 - accuracy: 0.8327 - val_loss: 0.3502 - val_accuracy: 0.8331 42/42 [==============================] - 0s 2ms/step - loss: 0.3502 - accuracy: 0.8331 Model Name: Convolutional Neural Network with MISH Activation Function ==> Test Loss: 0.3502, Test Accuracy: 0.8331
metricscalculation(model_name, cnn_model_mish, X_test, y_test_encoded)
42/42 [==============================] - 0s 3ms/step
Convolutional Neural Network with MISH Activation Function Confusion Matrix:
[[242 141]
[ 79 856]]
True Positives: 856
False Positives: 141
True Negatives: 242
False Negatives: 79
Metric Convolutional Neural Network with MISH Activation Function
0 Precision 0.858576
1 Recall 0.915508
2 F1 Score 0.886128
3 Sensitivity 0.915508
4 Specificity 0.631854
## CNN MISH
fig = create_table_with_formatting(y_test_encoded, np.argmax(cnn_model_mish.predict(X_test), axis=1), customerID)
fig.show()
42/42 [==============================] - 0s 4ms/step
Convolutional Neural Network with APTx Activation Function:
model_name = "Convolutional Neural Network with APTx Activation Function"
cnn_model_aptx = build_ffnn_model(aptx)
compileandtrainmodel(model_name, cnn_model_aptx, 10, 32)
Building Model: Convolutional Neural Network with APTx Activation Function Epoch 1/10 165/165 [==============================] - 2s 7ms/step - loss: 0.4140 - accuracy: 0.8029 - val_loss: 0.3791 - val_accuracy: 0.8217 Epoch 2/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3763 - accuracy: 0.8234 - val_loss: 0.3790 - val_accuracy: 0.8187 Epoch 3/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3748 - accuracy: 0.8238 - val_loss: 0.3704 - val_accuracy: 0.8209 Epoch 4/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3715 - accuracy: 0.8281 - val_loss: 0.3652 - val_accuracy: 0.8285 Epoch 5/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3699 - accuracy: 0.8279 - val_loss: 0.3609 - val_accuracy: 0.8270 Epoch 6/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3699 - accuracy: 0.8264 - val_loss: 0.3722 - val_accuracy: 0.8232 Epoch 7/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3701 - accuracy: 0.8255 - val_loss: 0.3624 - val_accuracy: 0.8202 Epoch 8/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3697 - accuracy: 0.8289 - val_loss: 0.3702 - val_accuracy: 0.8202 Epoch 9/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3691 - accuracy: 0.8258 - val_loss: 0.3629 - val_accuracy: 0.8316 Epoch 10/10 165/165 [==============================] - 1s 5ms/step - loss: 0.3676 - accuracy: 0.8287 - val_loss: 0.3648 - val_accuracy: 0.8270 42/42 [==============================] - 0s 2ms/step - loss: 0.3648 - accuracy: 0.8270 Model Name: Convolutional Neural Network with APTx Activation Function ==> Test Loss: 0.3648, Test Accuracy: 0.8270
metricscalculation(model_name, cnn_model_aptx, X_test, y_test_encoded)
42/42 [==============================] - 0s 2ms/step
Convolutional Neural Network with APTx Activation Function Confusion Matrix:
[[218 165]
[ 63 872]]
True Positives: 872
False Positives: 165
True Negatives: 218
False Negatives: 63
Metric Convolutional Neural Network with APTx Activation Function
0 Precision 0.840887
1 Recall 0.932620
2 F1 Score 0.884381
3 Sensitivity 0.932620
4 Specificity 0.569191
## CNN APTx
fig = create_table_with_formatting(y_test_encoded, np.argmax(cnn_model_aptx.predict(X_test), axis=1), customerID)
fig.show()
42/42 [==============================] - 0s 4ms/step
Ensemble Model of CNN and FFNN:
# Combining the outputs of CNN and FFNN Models
combinedInput = concatenate([cnn_model_swish.output, ffnn_model_swish.output])
# Add Fully Connected layers for Ensemble Model
x = Dense(4, activation=mish)(combinedInput)
x = Dense(1, activation="sigmoid")(x)
# Creating the Ensemble Model
ensemble_model = Model(inputs=[cnn_model_swish.input, ffnn_model_swish.input], outputs=x)
# Compiling the Ensemble Model
ensemble_model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
# Training the Ensemble Model
ensemble_model.fit([X_train, X_train], y_train_encoded, validation_data = ([X_test, X_test], y_test_encoded), epochs=10, batch_size=32, verbose=1)
# Making predictions with the Ensemble Model
ensemble_predictions = ensemble_model.predict([X_test, X_test])
ensemble_classes = np.round(ensemble_predictions)
# Calculating the accuracy of Ensemble Model
accuracy = accuracy_score(y_test_encoded, ensemble_classes)
print(f"Ensemble Accuracy: {accuracy:.4f}")
Epoch 1/10 165/165 [==============================] - 5s 11ms/step - loss: 0.5377 - accuracy: 0.8245 - val_loss: 0.4797 - val_accuracy: 0.8171 Epoch 2/10 165/165 [==============================] - 1s 9ms/step - loss: 0.4374 - accuracy: 0.8317 - val_loss: 0.4239 - val_accuracy: 0.8225 Epoch 3/10 165/165 [==============================] - 2s 9ms/step - loss: 0.3972 - accuracy: 0.8348 - val_loss: 0.3843 - val_accuracy: 0.8346 Epoch 4/10 165/165 [==============================] - 2s 9ms/step - loss: 0.3760 - accuracy: 0.8380 - val_loss: 0.3685 - val_accuracy: 0.8414 Epoch 5/10 165/165 [==============================] - 1s 9ms/step - loss: 0.3657 - accuracy: 0.8389 - val_loss: 0.3635 - val_accuracy: 0.8369 Epoch 6/10 165/165 [==============================] - 1s 9ms/step - loss: 0.3583 - accuracy: 0.8393 - val_loss: 0.3561 - val_accuracy: 0.8414 Epoch 7/10 165/165 [==============================] - 1s 8ms/step - loss: 0.3550 - accuracy: 0.8408 - val_loss: 0.3566 - val_accuracy: 0.8369 Epoch 8/10 165/165 [==============================] - 1s 9ms/step - loss: 0.3524 - accuracy: 0.8406 - val_loss: 0.3474 - val_accuracy: 0.8505 Epoch 9/10 165/165 [==============================] - 1s 8ms/step - loss: 0.3493 - accuracy: 0.8433 - val_loss: 0.3488 - val_accuracy: 0.8323 Epoch 10/10 165/165 [==============================] - 1s 8ms/step - loss: 0.3450 - accuracy: 0.8425 - val_loss: 0.3465 - val_accuracy: 0.8376 42/42 [==============================] - 0s 3ms/step Ensemble Accuracy: 0.8376
## Ensemble Predictions
fig = create_table_with_formatting(y_test_encoded, ensemble_classes[0][0], customerID)
fig.show()
Hyper Parameter Tuning and Cross Validation to Boost the Ensemble Model with XGBoost:
# Reshaping ensemble_predictions to be a 2-dimensional matrix
ensemble_predictions_reshaped = np.reshape(ensemble_predictions, (len(ensemble_predictions), -1))
# Defining the hyperparameter grid
param_grid = {
'n_estimators': [450, 460, 470, 480],
'max_depth': [10, 11, 12],
'learning_rate': [0.3, 0.4, 0.5]
}
# Initializing the XGBoost classifier
xgb_classifier = XGBClassifier()
# Initializing GridSearchCV with cross-validation
grid_search = GridSearchCV(estimator=xgb_classifier, param_grid=param_grid, scoring='accuracy', cv=3)
# Fitting the grid search to the data
grid_search.fit(ensemble_predictions_reshaped, y_test_encoded)
# Obtaining the best hyperparameters and model
best_params = grid_search.best_params_
best_model = grid_search.best_estimator_
# Printing the best parameters
print("Best Hyperparameters:", best_params)
# Storing the best parameters
xgb_n_estimators = best_params['n_estimators']
xgb_max_depth = best_params['max_depth']
xgb_learning_rate = best_params['learning_rate']
# Making predictions using the best model
best_predictions = best_model.predict(ensemble_predictions_reshaped)
# Calculating accuracy using the true labels
accuracy = accuracy_score(y_test_encoded, best_predictions)
print(f"Accuracy Score on Test Set: {accuracy:.4f}")
Best Hyperparameters: {'learning_rate': 0.3, 'max_depth': 10, 'n_estimators': 450}
Accuracy Score on Test Set: 0.9985
Performing XGBoost on Ensemble Model:
# Converting ensemble predictions into binary classes
ensemble_classes = np.round(ensemble_predictions)
# Calculating ensemble accuracy
ensemble_accuracy = accuracy_score(y_test_encoded, ensemble_classes)
print(f"Ensemble Accuracy: {ensemble_accuracy:.4f}")
# Training XGBoost Model on the ensemble predictions
xgb_model = xgb.XGBClassifier(n_estimators= xgb_n_estimators, max_depth = xgb_max_depth, learning_rate = xgb_learning_rate)
xgb_model.fit(ensemble_predictions, y_test_encoded)
# Predicting using the XGBoost Model
xgb_predictions = xgb_model.predict(ensemble_predictions)
# Calculating the XGBoost Model accuracy
xgb_accuracy = accuracy_score(y_test_encoded, xgb_predictions)
print(f"XGBoost Accuracy: {xgb_accuracy:.4f}")
Ensemble Accuracy: 0.8376 XGBoost Accuracy: 0.9985
## XGBoost Predictions
fig = create_table_with_formatting(y_test_encoded, xgb_predictions, customerID)
fig.show()
Generating Confusion Matrix and Calculating Metrics for Ensemble Model and XGBoost Model:
# Calculating confusion matrix for ensemble predictions
ensemble_confusion_matrix = confusion_matrix(y_test_encoded, ensemble_classes)
print("Ensemble Confusion Matrix:")
print(ensemble_confusion_matrix)
# Extracting values from Ensemble Confusion Matrix
ensemble_TP = ensemble_confusion_matrix[1, 1]
ensemble_FP = ensemble_confusion_matrix[0, 1]
ensemble_TN = ensemble_confusion_matrix[0, 0]
ensemble_FN = ensemble_confusion_matrix[1, 0]
# Calculating precision, recall, F1 score, Sensitivity and Specificity for Ensemble model
ensemble_precision = precision_score(y_test_encoded, ensemble_classes)
ensemble_recall = recall_score(y_test_encoded, ensemble_classes)
ensemble_f1 = f1_score(y_test_encoded, ensemble_classes)
ensemble_sensitivity = ensemble_TP / (ensemble_TP + ensemble_FN)
ensemble_specificity = ensemble_TN / (ensemble_TN + ensemble_FP)
Ensemble Confusion Matrix: [[238 145] [ 69 866]]
# Calculating confusion matrix for XGBoost predictions
xgb_confusion_matrix = confusion_matrix(y_test_encoded, xgb_predictions)
print("XGBoost Confusion Matrix:")
print(xgb_confusion_matrix)
# Extracting values from Ensemble Confusion Matrix
xgb_TP = xgb_confusion_matrix[1, 1]
xgb_FP = xgb_confusion_matrix[0, 1]
xgb_TN = xgb_confusion_matrix[0, 0]
xgb_FN = xgb_confusion_matrix[1, 0]
# Calculating precision, recall, F1 score, Sensitivity and Specificity for XGBoost model
xgb_precision = precision_score(y_test_encoded, xgb_predictions)
xgb_recall = recall_score(y_test_encoded, xgb_predictions)
xgb_f1 = f1_score(y_test_encoded, xgb_predictions)
xgb_sensitivity = xgb_TP / (xgb_TP + xgb_FN)
xgb_specificity = xgb_TN / (xgb_TN + xgb_FP)
XGBoost Confusion Matrix: [[382 1] [ 1 934]]
# Creating a DataFrame to store the metrics
metrics_df = pd.DataFrame({
"Metric": ["Precision", "Recall", "F1 Score", "Sensitivity", "Specificity"],
"Ensemble": [ensemble_precision, ensemble_recall, ensemble_f1, ensemble_sensitivity, ensemble_specificity],
"XGBoost": [xgb_precision, xgb_recall, xgb_f1, xgb_sensitivity, xgb_specificity]
})
# Print the metrics table
print(metrics_df)
Metric Ensemble XGBoost 0 Precision 0.856578 0.998930 1 Recall 0.926203 0.998930 2 F1 Score 0.890031 0.998930 3 Sensitivity 0.926203 0.998930 4 Specificity 0.621410 0.997389
Visualizing the Confusion Matrices of Ensemble Model and XGBoost Model:
# Defining class labels
class_labels = ["Negative", "Positive"]
# Setting the figure size
plt.figure(figsize=(10, 5))
# Plotting Ensemble Model Confusion Matrix
plt.subplot(1, 2, 1)
sns.heatmap(ensemble_confusion_matrix, annot=True, fmt="d", cmap="Blues", xticklabels=class_labels, yticklabels=class_labels)
plt.title("Confusion Matrix - Ensemble Model")
plt.xlabel("Predicted")
plt.ylabel("Actual")
# Plotting XGBoost Model Confusion Matrix
plt.subplot(1, 2, 2)
sns.heatmap(xgb_confusion_matrix, annot=True, fmt="d", cmap="Blues", xticklabels=class_labels, yticklabels=class_labels)
plt.title("Confusion Matrix - XGBoost Model")
plt.xlabel("Predicted")
plt.ylabel("Actual")
plt.tight_layout()
plt.show()
Calculating ROC Curve and AUC for XGBoost Model:
# Obtaining the predicted probabilities from the XGBoost model
xgb_probabilities = xgb_model.predict_proba(ensemble_predictions)[:, 1]
# Computing ROC curve and AUC for the XGBoost model
fpr_xgb, tpr_xgb, _ = roc_curve(y_test_encoded, xgb_probabilities)
roc_auc_xgb = auc(fpr_xgb, tpr_xgb)
# Plotting the ROC curve for the XGBoost model
plt.figure()
plt.plot(fpr_xgb, tpr_xgb, color='darkorange', lw=2, label='XGBoost (AUC = %0.2f)' % roc_auc_xgb)
plt.plot([0, 1], [0, 1], color='navy', lw=2, linestyle='--')
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('Receiver Operating Characteristic')
plt.legend(loc="lower right")
plt.show()